diff --git a/checkstyle-suppressions.xml b/checkstyle-suppressions.xml
new file mode 100644
index 0000000000000000000000000000000000000000..9242be3d0357a1b80004d8f818fa876870143e4f
--- /dev/null
+++ b/checkstyle-suppressions.xml
@@ -0,0 +1,33 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<!DOCTYPE suppressions PUBLIC
+"-//Puppy Crawl//DTD Suppressions 1.1//EN"
+"http://www.puppycrawl.com/dtds/suppressions_1_1.dtd">
+
+<!--
+
+  This file contains suppression rules for Checkstyle checks.
+  Ideally only files that cannot be modified (e.g. third-party code)
+  should be added here. All other violations should be fixed.
+
+-->
+
+<suppressions>
+<suppress checks=".*"
+          files="core/src/main/java/org/apache/spark/util/collection/TimSort.java"/>
+</suppressions>
diff --git a/checkstyle.xml b/checkstyle.xml
new file mode 100644
index 0000000000000000000000000000000000000000..a493ee443c752f09df618be0415b6bbd24f99d10
--- /dev/null
+++ b/checkstyle.xml
@@ -0,0 +1,164 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<!DOCTYPE module PUBLIC
+    "-//Puppy Crawl//DTD Check Configuration 1.3//EN"
+    "http://www.puppycrawl.com/dtds/configuration_1_3.dtd">
+
+<!--
+
+  Checkstyle configuration based on the Google coding conventions from:
+
+  - Google Java Style
+    https://google-styleguide.googlecode.com/svn-history/r130/trunk/javaguide.html
+
+  with Spark-specific changes from:
+
+    https://cwiki.apache.org/confluence/display/SPARK/Spark+Code+Style+Guide
+
+  Checkstyle is very configurable. Be sure to read the documentation at
+  http://checkstyle.sf.net (or in your downloaded distribution).
+
+  Most Checks are configurable; be sure to consult the documentation.
+
+  To completely disable a check, just comment it out or delete it from the file.
+
+  Authors: Max Vetrenko, Ruslan Diachenko, Roman Ivanov.
+
+ -->
+
+<module name = "Checker">
+    <property name="charset" value="UTF-8"/>
+
+    <property name="severity" value="error"/>
+
+    <property name="fileExtensions" value="java, properties, xml"/>
+
+    <module name="SuppressionFilter">
+        <property name="file" value="checkstyle-suppressions.xml"/>
+    </module>
+
+    <!-- Checks for whitespace -->
+    <!-- See http://checkstyle.sf.net/config_whitespace.html -->
+    <module name="FileTabCharacter">
+        <property name="eachLine" value="true"/>
+    </module>
+
+    <module name="TreeWalker">
+        <module name="OuterTypeFilename"/>
+        <module name="IllegalTokenText">
+            <property name="tokens" value="STRING_LITERAL, CHAR_LITERAL"/>
+            <property name="format" value="\\u00(08|09|0(a|A)|0(c|C)|0(d|D)|22|27|5(C|c))|\\(0(10|11|12|14|15|42|47)|134)"/>
+            <property name="message" value="Avoid using corresponding octal or Unicode escape."/>
+        </module>
+        <module name="AvoidEscapedUnicodeCharacters">
+            <property name="allowEscapesForControlCharacters" value="true"/>
+            <property name="allowByTailComment" value="true"/>
+            <property name="allowNonPrintableEscapes" value="true"/>
+        </module>
+        <!-- TODO: 11/09/15 disabled - the lengths are currently > 100 in many places -->
+        <!--
+        <module name="LineLength">
+            <property name="max" value="100"/>
+            <property name="ignorePattern" value="^package.*|^import.*|a href|href|http://|https://|ftp://"/>
+        </module>
+        -->
+        <module name="NoLineWrap"/>
+        <module name="EmptyBlock">
+            <property name="option" value="TEXT"/>
+            <property name="tokens" value="LITERAL_TRY, LITERAL_FINALLY, LITERAL_IF, LITERAL_ELSE, LITERAL_SWITCH"/>
+        </module>
+        <module name="NeedBraces">
+            <property name="allowSingleLineStatement" value="true"/>
+        </module>
+        <module name="OneStatementPerLine"/>
+        <module name="ArrayTypeStyle"/>
+        <module name="FallThrough"/>
+        <module name="UpperEll"/>
+        <module name="ModifierOrder"/>
+        <module name="SeparatorWrap">
+            <property name="tokens" value="DOT"/>
+            <property name="option" value="nl"/>
+        </module>
+        <module name="SeparatorWrap">
+            <property name="tokens" value="COMMA"/>
+            <property name="option" value="EOL"/>
+        </module>
+        <module name="PackageName">
+            <property name="format" value="^[a-z]+(\.[a-z][a-z0-9]*)*$"/>
+            <message key="name.invalidPattern"
+                     value="Package name ''{0}'' must match pattern ''{1}''."/>
+        </module>
+        <module name="ClassTypeParameterName">
+            <property name="format" value="([A-Z][a-zA-Z0-9]*$)"/>
+            <message key="name.invalidPattern"
+                     value="Class type name ''{0}'' must match pattern ''{1}''."/>
+        </module>
+        <module name="MethodTypeParameterName">
+            <property name="format" value="([A-Z][a-zA-Z0-9]*)"/>
+            <message key="name.invalidPattern"
+                     value="Method type name ''{0}'' must match pattern ''{1}''."/>
+        </module>
+        <module name="NoFinalizer"/>
+        <module name="GenericWhitespace">
+            <message key="ws.followed"
+                     value="GenericWhitespace ''{0}'' is followed by whitespace."/>
+            <message key="ws.preceded"
+                     value="GenericWhitespace ''{0}'' is preceded with whitespace."/>
+            <message key="ws.illegalFollow"
+                     value="GenericWhitespace ''{0}'' should be followed by whitespace."/>
+            <message key="ws.notPreceded"
+                     value="GenericWhitespace ''{0}'' is not preceded with whitespace."/>
+        </module>
+        <!-- TODO: 11/09/15 disabled - indentation is currently inconsistent -->
+        <!--
+        <module name="Indentation">
+            <property name="basicOffset" value="4"/>
+            <property name="braceAdjustment" value="0"/>
+            <property name="caseIndent" value="4"/>
+            <property name="throwsIndent" value="4"/>
+            <property name="lineWrappingIndentation" value="4"/>
+            <property name="arrayInitIndent" value="4"/>
+        </module>
+        -->
+        <!-- TODO: 11/09/15 disabled - order is currently wrong in many places -->
+        <!--
+        <module name="ImportOrder">
+            <property name="separated" value="true"/>
+            <property name="ordered" value="true"/>
+            <property name="groups" value="/^javax?\./,scala,*,org.apache.spark"/>
+        </module>
+        -->
+        <module name="MethodParamPad"/>
+        <module name="AnnotationLocation">
+            <property name="tokens" value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, METHOD_DEF, CTOR_DEF"/>
+        </module>
+        <module name="AnnotationLocation">
+            <property name="tokens" value="VARIABLE_DEF"/>
+            <property name="allowSamelineMultipleAnnotations" value="true"/>
+        </module>
+        <module name="MethodName">
+            <property name="format" value="^[a-z][a-z0-9][a-zA-Z0-9_]*$"/>
+            <message key="name.invalidPattern"
+                     value="Method name ''{0}'' must match pattern ''{1}''."/>
+        </module>
+        <module name="EmptyCatchBlock">
+            <property name="exceptionVariableName" value="expected"/>
+        </module>
+        <module name="CommentsIndentation"/>
+    </module>
+</module>
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java
index c91e88f31bf9b869461746852ff1e658a09f9e28..c16cbce9a0f6c0362adc71b04ed3104b377b7ffb 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java
@@ -175,7 +175,7 @@ public final class UnsafeInMemorySorter {
       this.position = 0;
     }
 
-    public SortedIterator clone () {
+    public SortedIterator clone() {
       SortedIterator iter = new SortedIterator(numRecords);
       iter.position = position;
       iter.baseObject = baseObject;
diff --git a/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java b/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java
index d87a1d2a56d99f0db60bbd7e04bfce28a3f4b118..a5c583f9f2844df0333104972ad471dadbcbbd22 100644
--- a/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java
+++ b/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java
@@ -356,8 +356,8 @@ public abstract class AbstractBytesToBytesMapSuite {
     final java.util.BitSet valuesSeen = new java.util.BitSet(NUM_ENTRIES);
     final Iterator<BytesToBytesMap.Location> iter = map.iterator();
-    final long key[] = new long[KEY_LENGTH / 8];
-    final long value[] = new long[VALUE_LENGTH / 8];
+    final long[] key = new long[KEY_LENGTH / 8];
+    final long[] value = new long[VALUE_LENGTH / 8];
     while (iter.hasNext()) {
       final BytesToBytesMap.Location loc = iter.next();
       Assert.assertTrue(loc.isDefined());
diff --git a/dev/lint-java b/dev/lint-java
new file mode 100755
index 0000000000000000000000000000000000000000..fe8ab83d562d174176025bec80f569330e523d07
--- /dev/null
+++ b/dev/lint-java
@@ -0,0 +1,30 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
+SPARK_ROOT_DIR="$(dirname $SCRIPT_DIR)"
+
+ERRORS=$($SCRIPT_DIR/../build/mvn -Pkinesis-asl -Pyarn -Phive -Phive-thriftserver checkstyle:check | grep ERROR)
+
+if test ! -z "$ERRORS"; then
+  echo -e "Checkstyle checks failed at following occurrences:\n$ERRORS"
+  exit 1
+else
+  echo -e "Checkstyle checks passed."
+fi
diff --git a/dev/run-tests-jenkins.py b/dev/run-tests-jenkins.py
index 4f390ef1eaa3209238f2d056036d63e014bb709c..7aecea25b20999815bbee3943b534b65ca94eb4b 100755
--- a/dev/run-tests-jenkins.py
+++ b/dev/run-tests-jenkins.py
@@ -119,6 +119,7 @@ def run_tests(tests_timeout):
         ERROR_CODES["BLOCK_GENERAL"]: 'some tests',
         ERROR_CODES["BLOCK_RAT"]: 'RAT tests',
         ERROR_CODES["BLOCK_SCALA_STYLE"]: 'Scala style tests',
+        ERROR_CODES["BLOCK_JAVA_STYLE"]: 'Java style tests',
         ERROR_CODES["BLOCK_PYTHON_STYLE"]: 'Python style tests',
         ERROR_CODES["BLOCK_R_STYLE"]: 'R style tests',
         ERROR_CODES["BLOCK_DOCUMENTATION"]: 'to generate documentation',
diff --git a/dev/run-tests.py b/dev/run-tests.py
index 9e1abb069719269af05febc3749681a20c134393..e7e10f1d8c725dd3b7cba7d2150b0b45cab59d70 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -198,6 +198,11 @@ def run_scala_style_checks():
     run_cmd([os.path.join(SPARK_HOME, "dev", "lint-scala")])
 
 
+def run_java_style_checks():
+    set_title_and_block("Running Java style checks", "BLOCK_JAVA_STYLE")
+    run_cmd([os.path.join(SPARK_HOME, "dev", "lint-java")])
+
+
 def run_python_style_checks():
     set_title_and_block("Running Python style checks", "BLOCK_PYTHON_STYLE")
     run_cmd([os.path.join(SPARK_HOME, "dev", "lint-python")])
@@ -522,6 +527,8 @@ def main():
     # style checks
     if not changed_files or any(f.endswith(".scala") for f in changed_files):
         run_scala_style_checks()
+    if not changed_files or any(f.endswith(".java") for f in changed_files):
+        run_java_style_checks()
    if not changed_files or any(f.endswith(".py") for f in changed_files):
         run_python_style_checks()
     if not changed_files or any(f.endswith(".R") for f in changed_files):
diff --git a/dev/sparktestsupport/__init__.py b/dev/sparktestsupport/__init__.py
index 8ab6d9e37ca2fe224b033e3f733a2e079914150b..0e8032d13341e97c38eb2326c31d49acc09d8936 100644
--- a/dev/sparktestsupport/__init__.py
+++ b/dev/sparktestsupport/__init__.py
@@ -31,5 +31,6 @@ ERROR_CODES = {
     "BLOCK_SPARK_UNIT_TESTS": 18,
     "BLOCK_PYSPARK_UNIT_TESTS": 19,
     "BLOCK_SPARKR_UNIT_TESTS": 20,
+    "BLOCK_JAVA_STYLE": 21,
     "BLOCK_TIMEOUT": 124
 }
diff --git a/examples/src/main/java/org/apache/spark/examples/ml/JavaSimpleParamsExample.java b/examples/src/main/java/org/apache/spark/examples/ml/JavaSimpleParamsExample.java
index 94beeced3d479d53adb7fbf3dabb5fedc2d8929e..ea83e8fef9eb918d8ced287dc875b91e3b9f3c63 100644
--- a/examples/src/main/java/org/apache/spark/examples/ml/JavaSimpleParamsExample.java
+++ b/examples/src/main/java/org/apache/spark/examples/ml/JavaSimpleParamsExample.java
@@ -77,7 +77,7 @@ public class JavaSimpleParamsExample {
     ParamMap paramMap = new ParamMap();
     paramMap.put(lr.maxIter().w(20)); // Specify 1 Param.
     paramMap.put(lr.maxIter(), 30); // This overwrites the original maxIter.
-    double thresholds[] = {0.45, 0.55};
+    double[] thresholds = {0.45, 0.55};
     paramMap.put(lr.regParam().w(0.1), lr.thresholds().w(thresholds)); // Specify multiple Params.
 
     // One can also combine ParamMaps.
diff --git a/examples/src/main/java/org/apache/spark/examples/mllib/JavaLDAExample.java b/examples/src/main/java/org/apache/spark/examples/mllib/JavaLDAExample.java
index fd53c81cc49747da28ff957c05c4e37308d06f07..de8e739ac92565d5876259e631de8552f7ebad28 100644
--- a/examples/src/main/java/org/apache/spark/examples/mllib/JavaLDAExample.java
+++ b/examples/src/main/java/org/apache/spark/examples/mllib/JavaLDAExample.java
@@ -41,8 +41,9 @@ public class JavaLDAExample {
           public Vector call(String s) {
             String[] sarray = s.trim().split(" ");
             double[] values = new double[sarray.length];
-            for (int i = 0; i < sarray.length; i++)
+            for (int i = 0; i < sarray.length; i++) {
               values[i] = Double.parseDouble(sarray[i]);
+            }
             return Vectors.dense(values);
           }
         }
diff --git a/examples/src/main/java/org/apache/spark/examples/mllib/JavaMultiLabelClassificationMetricsExample.java b/examples/src/main/java/org/apache/spark/examples/mllib/JavaMultiLabelClassificationMetricsExample.java
index b54e1ea3f2bcfa6ddc0965991e0c5720a9c586a6..5ba01e0d088169c93443483fcad89117815ff7bb 100644
--- a/examples/src/main/java/org/apache/spark/examples/mllib/JavaMultiLabelClassificationMetricsExample.java
+++ b/examples/src/main/java/org/apache/spark/examples/mllib/JavaMultiLabelClassificationMetricsExample.java
@@ -57,12 +57,12 @@ public class JavaMultiLabelClassificationMetricsExample {
 
     // Stats by labels
     for (int i = 0; i < metrics.labels().length - 1; i++) {
-      System.out.format("Class %1.1f precision = %f\n", metrics.labels()[i], metrics.precision
-        (metrics.labels()[i]));
-      System.out.format("Class %1.1f recall = %f\n", metrics.labels()[i], metrics.recall(metrics
-        .labels()[i]));
-      System.out.format("Class %1.1f F1 score = %f\n", metrics.labels()[i], metrics.f1Measure
-        (metrics.labels()[i]));
+      System.out.format("Class %1.1f precision = %f\n", metrics.labels()[i], metrics.precision(
+        metrics.labels()[i]));
+      System.out.format("Class %1.1f recall = %f\n", metrics.labels()[i], metrics.recall(
+        metrics.labels()[i]));
+      System.out.format("Class %1.1f F1 score = %f\n", metrics.labels()[i], metrics.f1Measure(
+        metrics.labels()[i]));
     }
 
     // Micro stats
diff --git a/examples/src/main/java/org/apache/spark/examples/mllib/JavaMulticlassClassificationMetricsExample.java b/examples/src/main/java/org/apache/spark/examples/mllib/JavaMulticlassClassificationMetricsExample.java
index 21f628fb51b6ee874fb941de89cf5feed4fa35bc..5247c9c7486185a6bf26bb49cda45c26d0dc260d 100644
--- a/examples/src/main/java/org/apache/spark/examples/mllib/JavaMulticlassClassificationMetricsExample.java
+++ b/examples/src/main/java/org/apache/spark/examples/mllib/JavaMulticlassClassificationMetricsExample.java
@@ -74,12 +74,12 @@ public class JavaMulticlassClassificationMetricsExample {
 
     // Stats by labels
     for (int i = 0; i < metrics.labels().length; i++) {
-      System.out.format("Class %f precision = %f\n", metrics.labels()[i],metrics.precision
-        (metrics.labels()[i]));
-      System.out.format("Class %f recall = %f\n", metrics.labels()[i], metrics.recall(metrics
-        .labels()[i]));
-      System.out.format("Class %f F1 score = %f\n", metrics.labels()[i], metrics.fMeasure
-        (metrics.labels()[i]));
+      System.out.format("Class %f precision = %f\n", metrics.labels()[i],metrics.precision(
+        metrics.labels()[i]));
+      System.out.format("Class %f recall = %f\n", metrics.labels()[i], metrics.recall(
+        metrics.labels()[i]));
+      System.out.format("Class %f F1 score = %f\n", metrics.labels()[i], metrics.fMeasure(
+        metrics.labels()[i]));
     }
 
     //Weighted stats
diff --git a/examples/src/main/java/org/apache/spark/examples/mllib/JavaRankingMetricsExample.java b/examples/src/main/java/org/apache/spark/examples/mllib/JavaRankingMetricsExample.java
index 7c4c97e74681f4547f540c1d238a44829f25d249..47ab3fc358246a0130f90a141519731f3244bd89 100644
--- a/examples/src/main/java/org/apache/spark/examples/mllib/JavaRankingMetricsExample.java
+++ b/examples/src/main/java/org/apache/spark/examples/mllib/JavaRankingMetricsExample.java
@@ -120,8 +120,8 @@ public class JavaRankingMetricsExample {
         }
       }
     );
-    JavaRDD<Tuple2<List<Integer>, List<Integer>>> relevantDocs = userMoviesList.join
-      (userRecommendedList).values();
+    JavaRDD<Tuple2<List<Integer>, List<Integer>>> relevantDocs = userMoviesList.join(
+      userRecommendedList).values();
 
     // Instantiate the metrics object
     RankingMetrics metrics = RankingMetrics.of(relevantDocs);
diff --git a/examples/src/main/java/org/apache/spark/examples/mllib/JavaRecommendationExample.java b/examples/src/main/java/org/apache/spark/examples/mllib/JavaRecommendationExample.java
index 1065fde953b965c78019a0a6d36655b84a71a10b..c179e7578cdfad9f34ca57f2665feac857a86d32 100644
--- a/examples/src/main/java/org/apache/spark/examples/mllib/JavaRecommendationExample.java
+++ b/examples/src/main/java/org/apache/spark/examples/mllib/JavaRecommendationExample.java
@@ -29,7 +29,7 @@ import org.apache.spark.SparkConf;
 // $example off$
 
 public class JavaRecommendationExample {
-  public static void main(String args[]) {
+  public static void main(String[] args) {
     // $example on$
     SparkConf conf = new SparkConf().setAppName("Java Collaborative Filtering Example");
     JavaSparkContext jsc = new JavaSparkContext(conf);
diff --git a/examples/src/main/java/org/apache/spark/examples/mllib/JavaRegressionMetricsExample.java b/examples/src/main/java/org/apache/spark/examples/mllib/JavaRegressionMetricsExample.java
index d2efc6bf97776fb9d749ba535634b42d78e3ac4e..4e89dd0c37c520c2691d36e2df2e4b65744d17ed 100644
--- a/examples/src/main/java/org/apache/spark/examples/mllib/JavaRegressionMetricsExample.java
+++ b/examples/src/main/java/org/apache/spark/examples/mllib/JavaRegressionMetricsExample.java
@@ -43,8 +43,9 @@ public class JavaRegressionMetricsExample {
         public LabeledPoint call(String line) {
           String[] parts = line.split(" ");
           double[] v = new double[parts.length - 1];
-          for (int i = 1; i < parts.length - 1; i++)
+          for (int i = 1; i < parts.length - 1; i++) {
             v[i - 1] = Double.parseDouble(parts[i].split(":")[1]);
+          }
           return new LabeledPoint(Double.parseDouble(parts[0]), Vectors.dense(v));
         }
       }
diff --git a/examples/src/main/java/org/apache/spark/examples/streaming/JavaSqlNetworkWordCount.java b/examples/src/main/java/org/apache/spark/examples/streaming/JavaSqlNetworkWordCount.java
index 46562ddbbcb574193423f2ad53265e226480c869..3515d7be45d372f311dc646fbc3dbf517afb5c06 100644
--- a/examples/src/main/java/org/apache/spark/examples/streaming/JavaSqlNetworkWordCount.java
+++ b/examples/src/main/java/org/apache/spark/examples/streaming/JavaSqlNetworkWordCount.java
@@ -112,8 +112,8 @@ public final class JavaSqlNetworkWordCount {
 
 /** Lazily instantiated singleton instance of SQLContext */
 class JavaSQLContextSingleton {
-  static private transient SQLContext instance = null;
-  static public SQLContext getInstance(SparkContext sparkContext) {
+  private static transient SQLContext instance = null;
+  public static SQLContext getInstance(SparkContext sparkContext) {
     if (instance == null) {
       instance = new SQLContext(sparkContext);
     }
diff --git a/mllib/src/test/java/org/apache/spark/ml/feature/JavaStringIndexerSuite.java b/mllib/src/test/java/org/apache/spark/ml/feature/JavaStringIndexerSuite.java
index 6b2c48ef1c342f6109e1cc968f1dfb7f3158662d..b2df79ba74feb7a02422ca90253aaf99984a0433 100644
--- a/mllib/src/test/java/org/apache/spark/ml/feature/JavaStringIndexerSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/feature/JavaStringIndexerSuite.java
@@ -58,7 +58,7 @@ public class JavaStringIndexerSuite {
       createStructField("label", StringType, false)
     });
     List<Row> data = Arrays.asList(
-      c(0, "a"), c(1, "b"), c(2, "c"), c(3, "a"), c(4, "a"), c(5, "c"));
+      cr(0, "a"), cr(1, "b"), cr(2, "c"), cr(3, "a"), cr(4, "a"), cr(5, "c"));
     DataFrame dataset = sqlContext.createDataFrame(data, schema);
 
     StringIndexer indexer = new StringIndexer()
@@ -67,12 +67,12 @@ public class JavaStringIndexerSuite {
     DataFrame output = indexer.fit(dataset).transform(dataset);
 
     Assert.assertArrayEquals(
-      new Row[] { c(0, 0.0), c(1, 2.0), c(2, 1.0), c(3, 0.0), c(4, 0.0), c(5, 1.0) },
+      new Row[] { cr(0, 0.0), cr(1, 2.0), cr(2, 1.0), cr(3, 0.0), cr(4, 0.0), cr(5, 1.0) },
       output.orderBy("id").select("id", "labelIndex").collect());
   }
 
   /** An alias for RowFactory.create. */
-  private Row c(Object... values) {
+  private Row cr(Object... values) {
     return RowFactory.create(values);
   }
 }
diff --git a/mllib/src/test/java/org/apache/spark/mllib/clustering/JavaLDASuite.java b/mllib/src/test/java/org/apache/spark/mllib/clustering/JavaLDASuite.java
index 3fea359a3b46c6cc91029e5959ebc46c47f0e305..225a216270b3bbe6005e70c870a14f0cc995760a 100644
--- a/mllib/src/test/java/org/apache/spark/mllib/clustering/JavaLDASuite.java
+++ b/mllib/src/test/java/org/apache/spark/mllib/clustering/JavaLDASuite.java
@@ -144,7 +144,7 @@ public class JavaLDASuite implements Serializable {
   }
 
   @Test
-  public void OnlineOptimizerCompatibility() {
+  public void onlineOptimizerCompatibility() {
     int k = 3;
     double topicSmoothing = 1.2;
     double termSmoothing = 1.2;
diff --git a/network/shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java b/network/shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java
index 0d4dd6afac769621e51064bbf338fbda323281df..e5cb68c8a4dbb65565294a386b681710864cd051 100644
--- a/network/shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java
+++ b/network/shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java
@@ -419,7 +419,7 @@ public class ExternalShuffleBlockResolver {
 
   public static class StoreVersion {
 
-    final static byte[] KEY = "StoreVersion".getBytes(Charsets.UTF_8);
+    static final byte[] KEY = "StoreVersion".getBytes(Charsets.UTF_8);
 
     public final int major;
     public final int minor;
diff --git a/network/shuffle/src/test/java/org/apache/spark/network/sasl/SaslIntegrationSuite.java b/network/shuffle/src/test/java/org/apache/spark/network/sasl/SaslIntegrationSuite.java
index 19c870aebb023a91ab85aa3f9dd339af7db36e4f..f573d962fe36160747f21d8ce0afd757f652d6fb 100644
--- a/network/shuffle/src/test/java/org/apache/spark/network/sasl/SaslIntegrationSuite.java
+++ b/network/shuffle/src/test/java/org/apache/spark/network/sasl/SaslIntegrationSuite.java
@@ -61,7 +61,7 @@ public class SaslIntegrationSuite {
 
   // Use a long timeout to account for slow / overloaded build machines. In the normal case,
   // tests should finish way before the timeout expires.
-  private final static long TIMEOUT_MS = 10_000;
+  private static final long TIMEOUT_MS = 10_000;
 
   static TransportServer server;
   static TransportConf conf;
diff --git a/pom.xml b/pom.xml
index 234fd5dea1a6e47566273ed7cdb81e7f7dd5bad7..16e656d11961d102673c130f9a6067ebf4209ea2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -2256,6 +2256,30 @@
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-checkstyle-plugin</artifactId>
+        <version>2.17</version>
+        <configuration>
+          <verbose>false</verbose>
+          <failOnViolation>false</failOnViolation>
+          <includeTestSourceDirectory>true</includeTestSourceDirectory>
+          <failOnWarning>false</failOnWarning>
+          <sourceDirectory>${basedir}/src/main/java</sourceDirectory>
+          <testSourceDirectory>${basedir}/src/test/java</testSourceDirectory>
+          <configLocation>checkstyle.xml</configLocation>
+          <outputFile>${basedir}/target/checkstyle-output.xml</outputFile>
+          <inputEncoding>${project.build.sourceEncoding}</inputEncoding>
+          <outputEncoding>${project.reporting.outputEncoding}</outputEncoding>
+        </configuration>
+        <executions>
+          <execution>
+            <goals>
+              <goal>check</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/execution/UnsafeExternalRowSorter.java b/sql/catalyst/src/main/java/org/apache/spark/sql/execution/UnsafeExternalRowSorter.java
index 3986d6e18f770ffca5cbbb992a0c1e4fba819a36..352002b3499a23d4430e9ff951b414de8cd13f45 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/execution/UnsafeExternalRowSorter.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/execution/UnsafeExternalRowSorter.java
@@ -51,7 +51,7 @@ final class UnsafeExternalRowSorter {
   private final PrefixComputer prefixComputer;
   private final UnsafeExternalSorter sorter;
 
-  public static abstract class PrefixComputer {
+  public abstract static class PrefixComputer {
     abstract long computePrefix(InternalRow row);
   }
 
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/types/SQLUserDefinedType.java b/sql/catalyst/src/main/java/org/apache/spark/sql/types/SQLUserDefinedType.java
index df64a878b6b36496b7bd71e3a5bb342830aa98eb..1e4e5ede8cc11883dc059895468b3eb6caaa78b6 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/types/SQLUserDefinedType.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/types/SQLUserDefinedType.java
@@ -41,5 +41,5 @@ public @interface SQLUserDefinedType {
    * Returns an instance of the UserDefinedType which can serialize and deserialize the user
    * class to and from Catalyst built-in types.
    */
-  Class<? extends UserDefinedType<?> > udt();
+  Class<? extends UserDefinedType<?>> udt();
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java
index 2ed30c1f5a8d9e4a9e212537f949aa6a6ef283f2..842dcb8c93dc2ca563349fb4f32761a6738c486d 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java
@@ -195,7 +195,7 @@ public abstract class SpecificParquetRecordReaderBase<T> extends RecordReader<Vo
    * Creates a reader for definition and repetition levels, returning an optimized one if
    * the levels are not needed.
    */
-  static protected IntIterator createRLEIterator(int maxLevel, BytesInput bytes,
+  protected static IntIterator createRLEIterator(int maxLevel, BytesInput bytes,
       ColumnDescriptor descriptor) throws IOException {
     try {
       if (maxLevel == 0) return new NullIntIterator();
diff --git a/sql/hive/src/test/java/org/apache/spark/sql/hive/test/Complex.java b/sql/hive/src/test/java/org/apache/spark/sql/hive/test/Complex.java
index e010112bb93275118ab393961f7790874371d318..4ef1f276d1bbbc1fe26551fc09dace15af948490 100644
--- a/sql/hive/src/test/java/org/apache/spark/sql/hive/test/Complex.java
+++ b/sql/hive/src/test/java/org/apache/spark/sql/hive/test/Complex.java
@@ -489,6 +489,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
       }
       break;
 
+    default:
     }
   }
 
@@ -512,6 +513,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
     case M_STRING_STRING:
       return getMStringString();
 
+    default:
     }
     throw new IllegalStateException();
   }
@@ -535,75 +537,91 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
       return isSetLintString();
     case M_STRING_STRING:
       return isSetMStringString();
+    default:
     }
     throw new IllegalStateException();
   }
 
   @Override
   public boolean equals(Object that) {
-    if (that == null)
+    if (that == null) {
       return false;
-    if (that instanceof Complex)
+    }
+    if (that instanceof Complex) {
       return this.equals((Complex)that);
+    }
     return false;
   }
 
   public boolean equals(Complex that) {
-    if (that == null)
+    if (that == null) {
       return false;
+    }
 
     boolean this_present_aint = true;
     boolean that_present_aint = true;
     if (this_present_aint || that_present_aint) {
-      if (!(this_present_aint && that_present_aint))
+      if (!(this_present_aint && that_present_aint)) {
         return false;
-      if (this.aint != that.aint)
+      }
+      if (this.aint != that.aint) {
         return false;
+      }
     }
 
     boolean this_present_aString = true && this.isSetAString();
     boolean that_present_aString = true && that.isSetAString();
     if (this_present_aString || that_present_aString) {
-      if (!(this_present_aString && that_present_aString))
+      if (!(this_present_aString && that_present_aString)) {
         return false;
-      if (!this.aString.equals(that.aString))
+      }
+      if (!this.aString.equals(that.aString)) {
         return false;
+      }
     }
 
     boolean this_present_lint = true && this.isSetLint();
     boolean that_present_lint = true && that.isSetLint();
     if (this_present_lint || that_present_lint) {
-      if (!(this_present_lint && that_present_lint))
+      if (!(this_present_lint && that_present_lint)) {
         return false;
-      if (!this.lint.equals(that.lint))
+      }
+      if (!this.lint.equals(that.lint)) {
         return false;
+      }
     }
 
     boolean this_present_lString = true && this.isSetLString();
     boolean that_present_lString = true && that.isSetLString();
     if (this_present_lString || that_present_lString) {
-      if (!(this_present_lString && that_present_lString))
+      if (!(this_present_lString && that_present_lString)) {
         return false;
-      if (!this.lString.equals(that.lString))
+      }
+      if (!this.lString.equals(that.lString)) {
         return false;
+      }
     }
 
     boolean this_present_lintString = true && this.isSetLintString();
     boolean that_present_lintString = true && that.isSetLintString();
     if (this_present_lintString || that_present_lintString) {
-      if (!(this_present_lintString && that_present_lintString))
+      if (!(this_present_lintString && that_present_lintString)) {
         return false;
-      if (!this.lintString.equals(that.lintString))
+      }
+      if (!this.lintString.equals(that.lintString)) {
         return false;
+      }
     }
 
     boolean this_present_mStringString = true && this.isSetMStringString();
     boolean that_present_mStringString = true && that.isSetMStringString();
     if (this_present_mStringString || that_present_mStringString) {
-      if (!(this_present_mStringString && that_present_mStringString))
+      if (!(this_present_mStringString && that_present_mStringString)) {
         return false;
-      if (!this.mStringString.equals(that.mStringString))
+      }
+      if (!this.mStringString.equals(that.mStringString)) {
         return false;
+      }
     }
 
     return true;
@@ -615,33 +633,39 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
 
     boolean present_aint = true;
     builder.append(present_aint);
-    if (present_aint)
+    if (present_aint) {
       builder.append(aint);
+    }
 
     boolean present_aString = true && (isSetAString());
     builder.append(present_aString);
-    if (present_aString)
+    if (present_aString) {
       builder.append(aString);
+    }
 
     boolean present_lint = true && (isSetLint());
     builder.append(present_lint);
-    if (present_lint)
+    if (present_lint) {
       builder.append(lint);
+    }
 
     boolean present_lString = true && (isSetLString());
     builder.append(present_lString);
-    if (present_lString)
+    if (present_lString) {
       builder.append(lString);
+    }
 
     boolean present_lintString = true && (isSetLintString());
     builder.append(present_lintString);
-    if (present_lintString)
+    if (present_lintString) {
       builder.append(lintString);
+    }
 
     boolean present_mStringString = true && (isSetMStringString());
     builder.append(present_mStringString);
-    if (present_mStringString)
+    if (present_mStringString) {
       builder.append(mStringString);
+    }
 
     return builder.toHashCode();
   }
@@ -737,7 +761,9 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
     sb.append("aint:");
     sb.append(this.aint);
     first = false;
-    if (!first) sb.append(", ");
+    if (!first) {
+      sb.append(", ");
+    }
     sb.append("aString:");
     if (this.aString == null) {
       sb.append("null");
@@ -745,7 +771,9 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
       sb.append(this.aString);
     }
     first = false;
-    if (!first) sb.append(", ");
+    if (!first) {
+      sb.append(", ");
+    }
     sb.append("lint:");
     if (this.lint == null) {
       sb.append("null");
@@ -753,7 +781,9 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
       sb.append(this.lint);
     }
     first = false;
-    if (!first) sb.append(", ");
+    if (!first) {
+      sb.append(", ");
+    }
     sb.append("lString:");
     if (this.lString == null) {
       sb.append("null");
@@ -761,7 +791,9 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
       sb.append(this.lString);
     }
     first = false;
-    if (!first) sb.append(", ");
+    if (!first) {
+      sb.append(", ");
+    }
     sb.append("lintString:");
     if (this.lintString == null) {
       sb.append("null");
@@ -769,7 +801,9 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
       sb.append(this.lintString);
     }
     first = false;
-    if (!first) sb.append(", ");
+    if (!first) {
+      sb.append(", ");
+    }
     sb.append("mStringString:");
     if (this.mStringString == null) {
       sb.append("null");
diff --git a/streaming/src/main/java/org/apache/spark/streaming/util/WriteAheadLog.java b/streaming/src/main/java/org/apache/spark/streaming/util/WriteAheadLog.java
index 3738fc1a235c22a36d5dd24e97ddcd2d82ac6b38..2803cad8095ddd6c31d87b3ce981d34512a08155 100644
--- a/streaming/src/main/java/org/apache/spark/streaming/util/WriteAheadLog.java
+++ b/streaming/src/main/java/org/apache/spark/streaming/util/WriteAheadLog.java
@@ -37,26 +37,26 @@ public abstract class WriteAheadLog {
    * ensure that the written data is durable and readable (using the record handle) by the
    * time this function returns.
    */
-  abstract public WriteAheadLogRecordHandle write(ByteBuffer record, long time);
+  public abstract WriteAheadLogRecordHandle write(ByteBuffer record, long time);
 
   /**
    * Read a written record based on the given record handle.
    */
-  abstract public ByteBuffer read(WriteAheadLogRecordHandle handle);
+  public abstract ByteBuffer read(WriteAheadLogRecordHandle handle);
 
   /**
   * Read and return an iterator of all the records that have been written but not yet cleaned up.
   */
-  abstract public Iterator<ByteBuffer> readAll();
+  public abstract Iterator<ByteBuffer> readAll();
 
   /**
   * Clean all the records that are older than the threshold time. It can wait for
   * the completion of the deletion.
   */
-  abstract public void clean(long threshTime, boolean waitForCompletion);
+  public abstract void clean(long threshTime, boolean waitForCompletion);
 
   /**
   * Close this log and release any resources.
   */
-  abstract public void close();
+  public abstract void close();
 }
diff --git a/streaming/src/test/java/org/apache/spark/streaming/JavaAPISuite.java b/streaming/src/test/java/org/apache/spark/streaming/JavaAPISuite.java
index 609bb4413b6b1a9c82410fad133b81f6a0de8d35..9722c60bba1c3ebd7c256d7feb1b3a08c30e3d60 100644
--- a/streaming/src/test/java/org/apache/spark/streaming/JavaAPISuite.java
+++ b/streaming/src/test/java/org/apache/spark/streaming/JavaAPISuite.java
@@ -1332,12 +1332,12 @@ public class JavaAPISuite extends LocalJavaStreamingContext implements Serializa
   public void testUpdateStateByKeyWithInitial() {
     List<List<Tuple2<String, Integer>>> inputData = stringIntKVStream;
 
-    List<Tuple2<String, Integer>> initial = Arrays.asList (
+    List<Tuple2<String, Integer>> initial = Arrays.asList(
         new Tuple2<>("california", 1),
         new Tuple2<>("new york", 2));
 
     JavaRDD<Tuple2<String, Integer>> tmpRDD = ssc.sparkContext().parallelize(initial);
-    JavaPairRDD<String, Integer> initialRDD = JavaPairRDD.fromJavaRDD (tmpRDD);
+    JavaPairRDD<String, Integer> initialRDD = JavaPairRDD.fromJavaRDD(tmpRDD);
 
     List<List<Tuple2<String, Integer>>> expected = Arrays.asList(
         Arrays.asList(new Tuple2<>("california", 5),
diff --git a/streaming/src/test/java/org/apache/spark/streaming/JavaTrackStateByKeySuite.java b/streaming/src/test/java/org/apache/spark/streaming/JavaTrackStateByKeySuite.java
index eac4cdd14a683657962bd3f09ccf4332b2374d88..89d0bb7b617e46b31f2f1a012dea5ea391cb2845 100644
--- a/streaming/src/test/java/org/apache/spark/streaming/JavaTrackStateByKeySuite.java
+++ b/streaming/src/test/java/org/apache/spark/streaming/JavaTrackStateByKeySuite.java
@@ -95,7 +95,7 @@ public class JavaTrackStateByKeySuite extends LocalJavaStreamingContext implemen
 
     JavaTrackStateDStream<String, Integer, Boolean, Double> stateDstream2 =
         wordsDstream.trackStateByKey(
-            StateSpec.<String, Integer, Boolean, Double> function(trackStateFunc2)
+            StateSpec.<String, Integer, Boolean, Double>function(trackStateFunc2)
                 .initialState(initialRDD)
                 .numPartitions(10)
                 .partitioner(new HashPartitioner(10))
diff --git a/tags/src/main/java/org/apache/spark/tags/ExtendedHiveTest.java b/tags/src/main/java/org/apache/spark/tags/ExtendedHiveTest.java
index 1b0c416b0fe4e4d1c57f2fefe7948d264c1d57cc..83279e5e93c0e007f33a3b7b5c2678b47ad239ec 100644
--- a/tags/src/main/java/org/apache/spark/tags/ExtendedHiveTest.java
+++ b/tags/src/main/java/org/apache/spark/tags/ExtendedHiveTest.java
@@ -18,6 +18,7 @@
 package org.apache.spark.tags;
 
 import java.lang.annotation.*;
+
 import org.scalatest.TagAnnotation;
 
 @TagAnnotation
diff --git a/tags/src/main/java/org/apache/spark/tags/ExtendedYarnTest.java b/tags/src/main/java/org/apache/spark/tags/ExtendedYarnTest.java
index 2a631bfc88cf076f3bc753b3f0660ed257659a25..108300168e173fb7aa512cbe8745b95910f3c0ef 100644
--- a/tags/src/main/java/org/apache/spark/tags/ExtendedYarnTest.java
+++ b/tags/src/main/java/org/apache/spark/tags/ExtendedYarnTest.java
@@ -18,6 +18,7 @@
 package org.apache.spark.tags;
 
 import java.lang.annotation.*;
+
 import org.scalatest.TagAnnotation;
 
 @TagAnnotation
diff --git a/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java b/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
index 4bd3fd7772079f4a326e5ae21d0cbc05c6db3328..5b61386808769cd2126055f276aa0aa63a8c6704 100644
--- a/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
+++ b/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
@@ -900,9 +900,9 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
       m = swap;
     }
 
-    int p[] = new int[n + 1];
-    int d[] = new int[n + 1];
-    int swap[];
+    int[] p = new int[n + 1];
+    int[] d = new int[n + 1];
+    int[] swap;
 
     int i, i_bytes, j, j_bytes, num_bytes_j, cost;
 
@@ -965,7 +965,7 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
       // first character must be a letter
       return this;
     }
-    byte sx[] = {'0', '0', '0', '0'};
+    byte[] sx = {'0', '0', '0', '0'};
     sx[0] = b;
     int sxi = 1;
     int idx = b - 'A';
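
Note on the wiring above: the pom.xml configuration sets failOnViolation to false, so checkstyle:check reports violations without breaking the Maven build; it is dev/lint-java that turns violations into a failure, by grepping the plugin output for ERROR lines. Either entry point can be used to exercise the new check locally. A minimal sketch, assuming a standard Spark checkout where build/mvn can bootstrap Maven (the profile list simply mirrors the one hard-coded in dev/lint-java):

    # Run the wrapper script the same way dev/run-tests.py invokes it:
    ./dev/lint-java

    # Or call the Maven goal directly with the same profiles:
    ./build/mvn -Pkinesis-asl -Pyarn -Phive -Phive-thriftserver checkstyle:check

Because checkstyle-suppressions.xml whitelists only third-party code (currently TimSort.java), any new violation elsewhere will surface in the grep and fail the lint step.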