[SPARK-6990][BUILD] Add Java linting script; fix minor warnings

This replaces https://github.com/apache/spark/pull/9696

Invoke Checkstyle and print any errors to the console, failing the step if any violations are found.
The rules are Google's style rules, modified according to
https://cwiki.apache.org/confluence/display/SPARK/Spark+Code+Style+Guide
Some important checks are disabled (see the TODOs in `checkstyle.xml`) because
the codebase currently contains many violations of them.

I suggest fixing those TODOs in one or more separate PRs.

More on Checkstyle can be found on the [official website](http://checkstyle.sourceforge.net/).

Sample output (from [build 46345](https://amplab.cs.berkeley.edu/jenkins/job/SparkPullRequestBuilder/46345/consoleFull)); the errors appear twice because the build runs with two different profiles:

> Checkstyle checks failed at following occurrences:
> [ERROR] src/main/java/org/apache/spark/sql/execution/datasources/parquet/UnsafeRowParquetRecordReader.java:[217,7] (coding) MissingSwitchDefault: switch without "default" clause.
> [ERROR] src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java:[198,10] (modifier) ModifierOrder: 'protected' modifier out of order with the JLS suggestions.
> [ERROR] src/main/java/org/apache/spark/sql/execution/datasources/parquet/UnsafeRowParquetRecordReader.java:[217,7] (coding) MissingSwitchDefault: switch without "default" clause.
> [ERROR] src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java:[198,10] (modifier) ModifierOrder: 'protected' modifier out of order with the JLS suggestions.
> [error] running /home/jenkins/workspace/SparkPullRequestBuilder2/dev/lint-java ; received return code 1
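
For illustration only, here is a minimal hypothetical snippet (not the actual Spark files named in the log) that trips the same two checks reported above, MissingSwitchDefault and ModifierOrder, together with its compliant form. The checks themselves can be run locally via the new `dev/lint-java` script:

```java
// Hypothetical example; class and method names are made up for illustration.
public class SwitchExample {

  // ModifierOrder: the JLS-suggested order is "protected static", not "static protected".
  static protected int modeOf(String s) {
    // MissingSwitchDefault: this switch has no "default" clause.
    switch (s) {
      case "fast":
        return 1;
      case "slow":
        return 0;
    }
    return -1;
  }

  // Compliant form of the same method.
  protected static int modeOfFixed(String s) {
    switch (s) {
      case "fast":
        return 1;
      case "slow":
        return 0;
      default:
        return -1;
    }
  }
}
```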

Also fix some of the minor violations that didn't require sweeping changes.

Apologies for the previous botched PRs - I finally figured out the issue.

cr: JoshRosen, pwendell

> I state that the contribution is my original work, and I license the work to the project under the project's open source license.

Author: Dmitry Erastov <derastov@gmail.com>

Closes #9867 from dskrvk/master.
Dmitry Erastov 2015-12-04 12:03:45 -08:00 committed by Josh Rosen
parent 95296d9b1a
commit d0d8222778
31 changed files with 368 additions and 70 deletions

checkstyle-suppressions.xml (new file, 33 lines)

@ -0,0 +1,33 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!DOCTYPE suppressions PUBLIC
"-//Puppy Crawl//DTD Suppressions 1.1//EN"
"http://www.puppycrawl.com/dtds/suppressions_1_1.dtd">
<!--
This file contains suppression rules for Checkstyle checks.
Ideally only files that cannot be modified (e.g. third-party code)
should be added here. All other violations should be fixed.
-->
<suppressions>
<suppress checks=".*"
files="core/src/main/java/org/apache/spark/util/collection/TimSort.java"/>
</suppressions>

checkstyle.xml (new file, 164 lines)

@ -0,0 +1,164 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!DOCTYPE module PUBLIC
"-//Puppy Crawl//DTD Check Configuration 1.3//EN"
"http://www.puppycrawl.com/dtds/configuration_1_3.dtd">
<!--
Checkstyle configuration based on the Google coding conventions from:
- Google Java Style
https://google-styleguide.googlecode.com/svn-history/r130/trunk/javaguide.html
with Spark-specific changes from:
https://cwiki.apache.org/confluence/display/SPARK/Spark+Code+Style+Guide
Checkstyle is very configurable. Be sure to read the documentation at
http://checkstyle.sf.net (or in your downloaded distribution).
Most Checks are configurable, be sure to consult the documentation.
To completely disable a check, just comment it out or delete it from the file.
Authors: Max Vetrenko, Ruslan Diachenko, Roman Ivanov.
-->
<module name = "Checker">
<property name="charset" value="UTF-8"/>
<property name="severity" value="error"/>
<property name="fileExtensions" value="java, properties, xml"/>
<module name="SuppressionFilter">
<property name="file" value="checkstyle-suppressions.xml"/>
</module>
<!-- Checks for whitespace -->
<!-- See http://checkstyle.sf.net/config_whitespace.html -->
<module name="FileTabCharacter">
<property name="eachLine" value="true"/>
</module>
<module name="TreeWalker">
<module name="OuterTypeFilename"/>
<module name="IllegalTokenText">
<property name="tokens" value="STRING_LITERAL, CHAR_LITERAL"/>
<property name="format" value="\\u00(08|09|0(a|A)|0(c|C)|0(d|D)|22|27|5(C|c))|\\(0(10|11|12|14|15|42|47)|134)"/>
<property name="message" value="Avoid using corresponding octal or Unicode escape."/>
</module>
<module name="AvoidEscapedUnicodeCharacters">
<property name="allowEscapesForControlCharacters" value="true"/>
<property name="allowByTailComment" value="true"/>
<property name="allowNonPrintableEscapes" value="true"/>
</module>
<!-- TODO: 11/09/15 disabled - the lengths are currently > 100 in many places -->
<!--
<module name="LineLength">
<property name="max" value="100"/>
<property name="ignorePattern" value="^package.*|^import.*|a href|href|http://|https://|ftp://"/>
</module>
-->
<module name="NoLineWrap"/>
<module name="EmptyBlock">
<property name="option" value="TEXT"/>
<property name="tokens" value="LITERAL_TRY, LITERAL_FINALLY, LITERAL_IF, LITERAL_ELSE, LITERAL_SWITCH"/>
</module>
<module name="NeedBraces">
<property name="allowSingleLineStatement" value="true"/>
</module>
<module name="OneStatementPerLine"/>
<module name="ArrayTypeStyle"/>
<module name="FallThrough"/>
<module name="UpperEll"/>
<module name="ModifierOrder"/>
<module name="SeparatorWrap">
<property name="tokens" value="DOT"/>
<property name="option" value="nl"/>
</module>
<module name="SeparatorWrap">
<property name="tokens" value="COMMA"/>
<property name="option" value="EOL"/>
</module>
<module name="PackageName">
<property name="format" value="^[a-z]+(\.[a-z][a-z0-9]*)*$"/>
<message key="name.invalidPattern"
value="Package name ''{0}'' must match pattern ''{1}''."/>
</module>
<module name="ClassTypeParameterName">
<property name="format" value="([A-Z][a-zA-Z0-9]*$)"/>
<message key="name.invalidPattern"
value="Class type name ''{0}'' must match pattern ''{1}''."/>
</module>
<module name="MethodTypeParameterName">
<property name="format" value="([A-Z][a-zA-Z0-9]*)"/>
<message key="name.invalidPattern"
value="Method type name ''{0}'' must match pattern ''{1}''."/>
</module>
<module name="NoFinalizer"/>
<module name="GenericWhitespace">
<message key="ws.followed"
value="GenericWhitespace ''{0}'' is followed by whitespace."/>
<message key="ws.preceded"
value="GenericWhitespace ''{0}'' is preceded with whitespace."/>
<message key="ws.illegalFollow"
value="GenericWhitespace ''{0}'' should followed by whitespace."/>
<message key="ws.notPreceded"
value="GenericWhitespace ''{0}'' is not preceded with whitespace."/>
</module>
<!-- TODO: 11/09/15 disabled - indentation is currently inconsistent -->
<!--
<module name="Indentation">
<property name="basicOffset" value="4"/>
<property name="braceAdjustment" value="0"/>
<property name="caseIndent" value="4"/>
<property name="throwsIndent" value="4"/>
<property name="lineWrappingIndentation" value="4"/>
<property name="arrayInitIndent" value="4"/>
</module>
-->
<!-- TODO: 11/09/15 disabled - order is currently wrong in many places -->
<!--
<module name="ImportOrder">
<property name="separated" value="true"/>
<property name="ordered" value="true"/>
<property name="groups" value="/^javax?\./,scala,*,org.apache.spark"/>
</module>
-->
<module name="MethodParamPad"/>
<module name="AnnotationLocation">
<property name="tokens" value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, METHOD_DEF, CTOR_DEF"/>
</module>
<module name="AnnotationLocation">
<property name="tokens" value="VARIABLE_DEF"/>
<property name="allowSamelineMultipleAnnotations" value="true"/>
</module>
<module name="MethodName">
<property name="format" value="^[a-z][a-z0-9][a-zA-Z0-9_]*$"/>
<message key="name.invalidPattern"
value="Method name ''{0}'' must match pattern ''{1}''."/>
</module>
<module name="EmptyCatchBlock">
<property name="exceptionVariableName" value="expected"/>
</module>
<module name="CommentsIndentation"/>
</module>
</module>
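
For context, a small illustrative Java snippet (hypothetical code, not taken from Spark) showing a few of the rules enabled above being violated and then satisfied, mirroring the kind of fixes made elsewhere in this commit:

```java
// Hypothetical example code; the identifiers are made up for illustration only.
public class StyleExamples {

  // ArrayTypeStyle: C-style array brackets on the variable are flagged.
  long counts[] = new long[4];
  // UpperEll: a lowercase 'l' long-literal suffix is flagged.
  long total = 10000000000l;

  // MethodName: method names must start with a lower-case letter.
  void Accumulate(long[] input) {
    // NeedBraces: a loop body on its own line without braces is flagged.
    for (int i = 0; i < input.length; i++)
      total += input[i];
  }

  // Compliant equivalents of the members above.
  long[] fixedCounts = new long[4];
  long fixedTotal = 10000000000L;

  void accumulate(long[] input) {
    for (int i = 0; i < input.length; i++) {
      total += input[i];
    }
  }
}
```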


@ -175,7 +175,7 @@ public final class UnsafeInMemorySorter {
this.position = 0;
}
public SortedIterator clone () {
public SortedIterator clone() {
SortedIterator iter = new SortedIterator(numRecords);
iter.position = position;
iter.baseObject = baseObject;


@ -356,8 +356,8 @@ public abstract class AbstractBytesToBytesMapSuite {
final java.util.BitSet valuesSeen = new java.util.BitSet(NUM_ENTRIES);
final Iterator<BytesToBytesMap.Location> iter = map.iterator();
final long key[] = new long[KEY_LENGTH / 8];
final long value[] = new long[VALUE_LENGTH / 8];
final long[] key = new long[KEY_LENGTH / 8];
final long[] value = new long[VALUE_LENGTH / 8];
while (iter.hasNext()) {
final BytesToBytesMap.Location loc = iter.next();
Assert.assertTrue(loc.isDefined());

dev/lint-java (new executable file, 30 lines)

@ -0,0 +1,30 @@
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
SPARK_ROOT_DIR="$(dirname $SCRIPT_DIR)"
ERRORS=$($SCRIPT_DIR/../build/mvn -Pkinesis-asl -Pyarn -Phive -Phive-thriftserver checkstyle:check | grep ERROR)
if test ! -z "$ERRORS"; then
echo -e "Checkstyle checks failed at following occurrences:\n$ERRORS"
exit 1
else
echo -e "Checkstyle checks passed."
fi


@ -119,6 +119,7 @@ def run_tests(tests_timeout):
ERROR_CODES["BLOCK_GENERAL"]: 'some tests',
ERROR_CODES["BLOCK_RAT"]: 'RAT tests',
ERROR_CODES["BLOCK_SCALA_STYLE"]: 'Scala style tests',
ERROR_CODES["BLOCK_JAVA_STYLE"]: 'Java style tests',
ERROR_CODES["BLOCK_PYTHON_STYLE"]: 'Python style tests',
ERROR_CODES["BLOCK_R_STYLE"]: 'R style tests',
ERROR_CODES["BLOCK_DOCUMENTATION"]: 'to generate documentation',


@ -198,6 +198,11 @@ def run_scala_style_checks():
run_cmd([os.path.join(SPARK_HOME, "dev", "lint-scala")])
def run_java_style_checks():
set_title_and_block("Running Java style checks", "BLOCK_JAVA_STYLE")
run_cmd([os.path.join(SPARK_HOME, "dev", "lint-java")])
def run_python_style_checks():
set_title_and_block("Running Python style checks", "BLOCK_PYTHON_STYLE")
run_cmd([os.path.join(SPARK_HOME, "dev", "lint-python")])
@ -522,6 +527,8 @@ def main():
# style checks
if not changed_files or any(f.endswith(".scala") for f in changed_files):
run_scala_style_checks()
if not changed_files or any(f.endswith(".java") for f in changed_files):
run_java_style_checks()
if not changed_files or any(f.endswith(".py") for f in changed_files):
run_python_style_checks()
if not changed_files or any(f.endswith(".R") for f in changed_files):


@ -31,5 +31,6 @@ ERROR_CODES = {
"BLOCK_SPARK_UNIT_TESTS": 18,
"BLOCK_PYSPARK_UNIT_TESTS": 19,
"BLOCK_SPARKR_UNIT_TESTS": 20,
"BLOCK_JAVA_STYLE": 21,
"BLOCK_TIMEOUT": 124
}


@ -77,7 +77,7 @@ public class JavaSimpleParamsExample {
ParamMap paramMap = new ParamMap();
paramMap.put(lr.maxIter().w(20)); // Specify 1 Param.
paramMap.put(lr.maxIter(), 30); // This overwrites the original maxIter.
double thresholds[] = {0.45, 0.55};
double[] thresholds = {0.45, 0.55};
paramMap.put(lr.regParam().w(0.1), lr.thresholds().w(thresholds)); // Specify multiple Params.
// One can also combine ParamMaps.


@ -41,8 +41,9 @@ public class JavaLDAExample {
public Vector call(String s) {
String[] sarray = s.trim().split(" ");
double[] values = new double[sarray.length];
for (int i = 0; i < sarray.length; i++)
for (int i = 0; i < sarray.length; i++) {
values[i] = Double.parseDouble(sarray[i]);
}
return Vectors.dense(values);
}
}


@ -57,12 +57,12 @@ public class JavaMultiLabelClassificationMetricsExample {
// Stats by labels
for (int i = 0; i < metrics.labels().length - 1; i++) {
System.out.format("Class %1.1f precision = %f\n", metrics.labels()[i], metrics.precision
(metrics.labels()[i]));
System.out.format("Class %1.1f recall = %f\n", metrics.labels()[i], metrics.recall(metrics
.labels()[i]));
System.out.format("Class %1.1f F1 score = %f\n", metrics.labels()[i], metrics.f1Measure
(metrics.labels()[i]));
System.out.format("Class %1.1f precision = %f\n", metrics.labels()[i], metrics.precision(
metrics.labels()[i]));
System.out.format("Class %1.1f recall = %f\n", metrics.labels()[i], metrics.recall(
metrics.labels()[i]));
System.out.format("Class %1.1f F1 score = %f\n", metrics.labels()[i], metrics.f1Measure(
metrics.labels()[i]));
}
// Micro stats


@ -74,12 +74,12 @@ public class JavaMulticlassClassificationMetricsExample {
// Stats by labels
for (int i = 0; i < metrics.labels().length; i++) {
System.out.format("Class %f precision = %f\n", metrics.labels()[i],metrics.precision
(metrics.labels()[i]));
System.out.format("Class %f recall = %f\n", metrics.labels()[i], metrics.recall(metrics
.labels()[i]));
System.out.format("Class %f F1 score = %f\n", metrics.labels()[i], metrics.fMeasure
(metrics.labels()[i]));
System.out.format("Class %f precision = %f\n", metrics.labels()[i],metrics.precision(
metrics.labels()[i]));
System.out.format("Class %f recall = %f\n", metrics.labels()[i], metrics.recall(
metrics.labels()[i]));
System.out.format("Class %f F1 score = %f\n", metrics.labels()[i], metrics.fMeasure(
metrics.labels()[i]));
}
//Weighted stats


@ -120,8 +120,8 @@ public class JavaRankingMetricsExample {
}
}
);
JavaRDD<Tuple2<List<Integer>, List<Integer>>> relevantDocs = userMoviesList.join
(userRecommendedList).values();
JavaRDD<Tuple2<List<Integer>, List<Integer>>> relevantDocs = userMoviesList.join(
userRecommendedList).values();
// Instantiate the metrics object
RankingMetrics metrics = RankingMetrics.of(relevantDocs);


@ -29,7 +29,7 @@ import org.apache.spark.SparkConf;
// $example off$
public class JavaRecommendationExample {
public static void main(String args[]) {
public static void main(String[] args) {
// $example on$
SparkConf conf = new SparkConf().setAppName("Java Collaborative Filtering Example");
JavaSparkContext jsc = new JavaSparkContext(conf);


@ -43,8 +43,9 @@ public class JavaRegressionMetricsExample {
public LabeledPoint call(String line) {
String[] parts = line.split(" ");
double[] v = new double[parts.length - 1];
for (int i = 1; i < parts.length - 1; i++)
for (int i = 1; i < parts.length - 1; i++) {
v[i - 1] = Double.parseDouble(parts[i].split(":")[1]);
}
return new LabeledPoint(Double.parseDouble(parts[0]), Vectors.dense(v));
}
}


@ -112,8 +112,8 @@ public final class JavaSqlNetworkWordCount {
/** Lazily instantiated singleton instance of SQLContext */
class JavaSQLContextSingleton {
static private transient SQLContext instance = null;
static public SQLContext getInstance(SparkContext sparkContext) {
private static transient SQLContext instance = null;
public static SQLContext getInstance(SparkContext sparkContext) {
if (instance == null) {
instance = new SQLContext(sparkContext);
}


@ -58,7 +58,7 @@ public class JavaStringIndexerSuite {
createStructField("label", StringType, false)
});
List<Row> data = Arrays.asList(
c(0, "a"), c(1, "b"), c(2, "c"), c(3, "a"), c(4, "a"), c(5, "c"));
cr(0, "a"), cr(1, "b"), cr(2, "c"), cr(3, "a"), cr(4, "a"), cr(5, "c"));
DataFrame dataset = sqlContext.createDataFrame(data, schema);
StringIndexer indexer = new StringIndexer()
@ -67,12 +67,12 @@ public class JavaStringIndexerSuite {
DataFrame output = indexer.fit(dataset).transform(dataset);
Assert.assertArrayEquals(
new Row[] { c(0, 0.0), c(1, 2.0), c(2, 1.0), c(3, 0.0), c(4, 0.0), c(5, 1.0) },
new Row[] { cr(0, 0.0), cr(1, 2.0), cr(2, 1.0), cr(3, 0.0), cr(4, 0.0), cr(5, 1.0) },
output.orderBy("id").select("id", "labelIndex").collect());
}
/** An alias for RowFactory.create. */
private Row c(Object... values) {
private Row cr(Object... values) {
return RowFactory.create(values);
}
}


@ -144,7 +144,7 @@ public class JavaLDASuite implements Serializable {
}
@Test
public void OnlineOptimizerCompatibility() {
public void onlineOptimizerCompatibility() {
int k = 3;
double topicSmoothing = 1.2;
double termSmoothing = 1.2;


@ -419,7 +419,7 @@ public class ExternalShuffleBlockResolver {
public static class StoreVersion {
final static byte[] KEY = "StoreVersion".getBytes(Charsets.UTF_8);
static final byte[] KEY = "StoreVersion".getBytes(Charsets.UTF_8);
public final int major;
public final int minor;


@ -61,7 +61,7 @@ public class SaslIntegrationSuite {
// Use a long timeout to account for slow / overloaded build machines. In the normal case,
// tests should finish way before the timeout expires.
private final static long TIMEOUT_MS = 10_000;
private static final long TIMEOUT_MS = 10_000;
static TransportServer server;
static TransportConf conf;

pom.xml (24 changed lines)

@ -2256,6 +2256,30 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>2.17</version>
<configuration>
<verbose>false</verbose>
<failOnViolation>false</failOnViolation>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<failOnWarning>false</failOnWarning>
<sourceDirectory>${basedir}/src/main/java</sourceDirectory>
<testSourceDirectory>${basedir}/src/test/java</testSourceDirectory>
<configLocation>checkstyle.xml</configLocation>
<outputFile>${basedir}/target/checkstyle-output.xml</outputFile>
<inputEncoding>${project.build.sourceEncoding}</inputEncoding>
<outputEncoding>${project.reporting.outputEncoding}</outputEncoding>
</configuration>
<executions>
<execution>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>


@ -51,7 +51,7 @@ final class UnsafeExternalRowSorter {
private final PrefixComputer prefixComputer;
private final UnsafeExternalSorter sorter;
public static abstract class PrefixComputer {
public abstract static class PrefixComputer {
abstract long computePrefix(InternalRow row);
}


@ -41,5 +41,5 @@ public @interface SQLUserDefinedType {
* Returns an instance of the UserDefinedType which can serialize and deserialize the user
* class to and from Catalyst built-in types.
*/
Class<? extends UserDefinedType<?> > udt();
Class<? extends UserDefinedType<?>> udt();
}


@ -195,7 +195,7 @@ public abstract class SpecificParquetRecordReaderBase<T> extends RecordReader<Vo
* Creates a reader for definition and repetition levels, returning an optimized one if
* the levels are not needed.
*/
static protected IntIterator createRLEIterator(int maxLevel, BytesInput bytes,
protected static IntIterator createRLEIterator(int maxLevel, BytesInput bytes,
ColumnDescriptor descriptor) throws IOException {
try {
if (maxLevel == 0) return new NullIntIterator();


@ -489,6 +489,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
}
break;
default:
}
}
@ -512,6 +513,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
case M_STRING_STRING:
return getMStringString();
default:
}
throw new IllegalStateException();
}
@ -535,75 +537,91 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
return isSetLintString();
case M_STRING_STRING:
return isSetMStringString();
default:
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
if (that == null) {
return false;
if (that instanceof Complex)
}
if (that instanceof Complex) {
return this.equals((Complex)that);
}
return false;
}
public boolean equals(Complex that) {
if (that == null)
if (that == null) {
return false;
}
boolean this_present_aint = true;
boolean that_present_aint = true;
if (this_present_aint || that_present_aint) {
if (!(this_present_aint && that_present_aint))
if (!(this_present_aint && that_present_aint)) {
return false;
if (this.aint != that.aint)
}
if (this.aint != that.aint) {
return false;
}
}
boolean this_present_aString = true && this.isSetAString();
boolean that_present_aString = true && that.isSetAString();
if (this_present_aString || that_present_aString) {
if (!(this_present_aString && that_present_aString))
if (!(this_present_aString && that_present_aString)) {
return false;
if (!this.aString.equals(that.aString))
}
if (!this.aString.equals(that.aString)) {
return false;
}
}
boolean this_present_lint = true && this.isSetLint();
boolean that_present_lint = true && that.isSetLint();
if (this_present_lint || that_present_lint) {
if (!(this_present_lint && that_present_lint))
if (!(this_present_lint && that_present_lint)) {
return false;
if (!this.lint.equals(that.lint))
}
if (!this.lint.equals(that.lint)) {
return false;
}
}
boolean this_present_lString = true && this.isSetLString();
boolean that_present_lString = true && that.isSetLString();
if (this_present_lString || that_present_lString) {
if (!(this_present_lString && that_present_lString))
if (!(this_present_lString && that_present_lString)) {
return false;
if (!this.lString.equals(that.lString))
}
if (!this.lString.equals(that.lString)) {
return false;
}
}
boolean this_present_lintString = true && this.isSetLintString();
boolean that_present_lintString = true && that.isSetLintString();
if (this_present_lintString || that_present_lintString) {
if (!(this_present_lintString && that_present_lintString))
if (!(this_present_lintString && that_present_lintString)) {
return false;
if (!this.lintString.equals(that.lintString))
}
if (!this.lintString.equals(that.lintString)) {
return false;
}
}
boolean this_present_mStringString = true && this.isSetMStringString();
boolean that_present_mStringString = true && that.isSetMStringString();
if (this_present_mStringString || that_present_mStringString) {
if (!(this_present_mStringString && that_present_mStringString))
if (!(this_present_mStringString && that_present_mStringString)) {
return false;
if (!this.mStringString.equals(that.mStringString))
}
if (!this.mStringString.equals(that.mStringString)) {
return false;
}
}
return true;
@ -615,33 +633,39 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
boolean present_aint = true;
builder.append(present_aint);
if (present_aint)
if (present_aint) {
builder.append(aint);
}
boolean present_aString = true && (isSetAString());
builder.append(present_aString);
if (present_aString)
if (present_aString) {
builder.append(aString);
}
boolean present_lint = true && (isSetLint());
builder.append(present_lint);
if (present_lint)
if (present_lint) {
builder.append(lint);
}
boolean present_lString = true && (isSetLString());
builder.append(present_lString);
if (present_lString)
if (present_lString) {
builder.append(lString);
}
boolean present_lintString = true && (isSetLintString());
builder.append(present_lintString);
if (present_lintString)
if (present_lintString) {
builder.append(lintString);
}
boolean present_mStringString = true && (isSetMStringString());
builder.append(present_mStringString);
if (present_mStringString)
if (present_mStringString) {
builder.append(mStringString);
}
return builder.toHashCode();
}
@ -737,7 +761,9 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
sb.append("aint:");
sb.append(this.aint);
first = false;
if (!first) sb.append(", ");
if (!first) {
sb.append(", ");
}
sb.append("aString:");
if (this.aString == null) {
sb.append("null");
@ -745,7 +771,9 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
sb.append(this.aString);
}
first = false;
if (!first) sb.append(", ");
if (!first) {
sb.append(", ");
}
sb.append("lint:");
if (this.lint == null) {
sb.append("null");
@ -753,7 +781,9 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
sb.append(this.lint);
}
first = false;
if (!first) sb.append(", ");
if (!first) {
sb.append(", ");
}
sb.append("lString:");
if (this.lString == null) {
sb.append("null");
@ -761,7 +791,9 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
sb.append(this.lString);
}
first = false;
if (!first) sb.append(", ");
if (!first) {
sb.append(", ");
}
sb.append("lintString:");
if (this.lintString == null) {
sb.append("null");
@ -769,7 +801,9 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
sb.append(this.lintString);
}
first = false;
if (!first) sb.append(", ");
if (!first) {
sb.append(", ");
}
sb.append("mStringString:");
if (this.mStringString == null) {
sb.append("null");


@ -37,26 +37,26 @@ public abstract class WriteAheadLog {
* ensure that the written data is durable and readable (using the record handle) by the
* time this function returns.
*/
abstract public WriteAheadLogRecordHandle write(ByteBuffer record, long time);
public abstract WriteAheadLogRecordHandle write(ByteBuffer record, long time);
/**
* Read a written record based on the given record handle.
*/
abstract public ByteBuffer read(WriteAheadLogRecordHandle handle);
public abstract ByteBuffer read(WriteAheadLogRecordHandle handle);
/**
* Read and return an iterator of all the records that have been written but not yet cleaned up.
*/
abstract public Iterator<ByteBuffer> readAll();
public abstract Iterator<ByteBuffer> readAll();
/**
* Clean all the records that are older than the threshold time. It can wait for
* the completion of the deletion.
*/
abstract public void clean(long threshTime, boolean waitForCompletion);
public abstract void clean(long threshTime, boolean waitForCompletion);
/**
* Close this log and release any resources.
*/
abstract public void close();
public abstract void close();
}
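
Purely for context (and not part of this commit), the Javadoc above spells out the WriteAheadLog contract; a minimal, non-durable, in-memory sketch of an implementation could look like the following, where `InMemoryWriteAheadLog` and `MemoryHandle` are hypothetical names:

```java
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.spark.streaming.util.WriteAheadLog;
import org.apache.spark.streaming.util.WriteAheadLogRecordHandle;

// Hypothetical in-memory sketch of the contract above; it is not durable,
// so it is for illustration only and not a real write-ahead log.
class InMemoryWriteAheadLog extends WriteAheadLog {

  // Hypothetical handle that just remembers where the record was stored.
  static class MemoryHandle extends WriteAheadLogRecordHandle {
    final int index;
    MemoryHandle(int index) { this.index = index; }
  }

  private final List<ByteBuffer> records = new ArrayList<ByteBuffer>();

  @Override
  public WriteAheadLogRecordHandle write(ByteBuffer record, long time) {
    // A durable implementation would also persist `time` so clean() can use it.
    records.add(record.duplicate());
    return new MemoryHandle(records.size() - 1);
  }

  @Override
  public ByteBuffer read(WriteAheadLogRecordHandle handle) {
    return records.get(((MemoryHandle) handle).index).duplicate();
  }

  @Override
  public Iterator<ByteBuffer> readAll() {
    return records.iterator();
  }

  @Override
  public void clean(long threshTime, boolean waitForCompletion) {
    // A real implementation would drop records older than threshTime here.
  }

  @Override
  public void close() {
    records.clear();
  }
}
```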


@ -1332,12 +1332,12 @@ public class JavaAPISuite extends LocalJavaStreamingContext implements Serializa
public void testUpdateStateByKeyWithInitial() {
List<List<Tuple2<String, Integer>>> inputData = stringIntKVStream;
List<Tuple2<String, Integer>> initial = Arrays.asList (
List<Tuple2<String, Integer>> initial = Arrays.asList(
new Tuple2<>("california", 1),
new Tuple2<>("new york", 2));
JavaRDD<Tuple2<String, Integer>> tmpRDD = ssc.sparkContext().parallelize(initial);
JavaPairRDD<String, Integer> initialRDD = JavaPairRDD.fromJavaRDD (tmpRDD);
JavaPairRDD<String, Integer> initialRDD = JavaPairRDD.fromJavaRDD(tmpRDD);
List<List<Tuple2<String, Integer>>> expected = Arrays.asList(
Arrays.asList(new Tuple2<>("california", 5),


@ -95,7 +95,7 @@ public class JavaTrackStateByKeySuite extends LocalJavaStreamingContext implemen
JavaTrackStateDStream<String, Integer, Boolean, Double> stateDstream2 =
wordsDstream.trackStateByKey(
StateSpec.<String, Integer, Boolean, Double> function(trackStateFunc2)
StateSpec.<String, Integer, Boolean, Double>function(trackStateFunc2)
.initialState(initialRDD)
.numPartitions(10)
.partitioner(new HashPartitioner(10))


@ -18,6 +18,7 @@
package org.apache.spark.tags;
import java.lang.annotation.*;
import org.scalatest.TagAnnotation;
@TagAnnotation


@ -18,6 +18,7 @@
package org.apache.spark.tags;
import java.lang.annotation.*;
import org.scalatest.TagAnnotation;
@TagAnnotation


@ -900,9 +900,9 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
m = swap;
}
int p[] = new int[n + 1];
int d[] = new int[n + 1];
int swap[];
int[] p = new int[n + 1];
int[] d = new int[n + 1];
int[] swap;
int i, i_bytes, j, j_bytes, num_bytes_j, cost;
@ -965,7 +965,7 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
// first character must be a letter
return this;
}
byte sx[] = {'0', '0', '0', '0'};
byte[] sx = {'0', '0', '0', '0'};
sx[0] = b;
int sxi = 1;
int idx = b - 'A';