[MINOR][BUILD] Fix Java linter errors
## What changes were proposed in this pull request?

This PR cleans up the java-lint errors (for the v2.3.0-rc1 tag). Hopefully, this will be the final one.

```
$ dev/lint-java
Using `mvn` from path: /usr/local/bin/mvn
Checkstyle checks failed at following occurrences:
[ERROR] src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java:[85] (sizes) LineLength: Line is longer than 100 characters (found 101).
[ERROR] src/main/java/org/apache/spark/launcher/InProcessAppHandle.java:[20,8] (imports) UnusedImports: Unused import - java.io.IOException.
[ERROR] src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnVector.java:[41,9] (modifier) ModifierOrder: 'private' modifier out of order with the JLS suggestions.
[ERROR] src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java:[464] (sizes) LineLength: Line is longer than 100 characters (found 102).
```

## How was this patch tested?

Manual.

```
$ dev/lint-java
Using `mvn` from path: /usr/local/bin/mvn
Checkstyle checks passed.
```

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #20242 from dongjoon-hyun/fix_lint_java_2.3_rc1.
commit 7bd14cfd40
parent 651f76153f
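For reference, each `[ERROR]` line above names the Checkstyle check that fired. As an illustration only (not code from this patch), a minimal compilable Java file tripping the same three check types could look like:

```java
package example;

// UnusedImports: imported but never referenced anywhere in the file.
import java.io.IOException;

public class LintExamples {
  // ModifierOrder: legal Java, but the JLS ordering puts 'private' before 'final'.
  final private int flagged = 0;

  // LineLength: the 'sizes' category also flags any line longer than 100 characters.
}
```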
src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java:

```diff
@@ -82,7 +82,8 @@ public class HeapMemoryAllocator implements MemoryAllocator {
       "page has already been freed";
     assert ((memory.pageNumber == MemoryBlock.NO_PAGE_NUMBER)
             || (memory.pageNumber == MemoryBlock.FREED_IN_TMM_PAGE_NUMBER)) :
-      "TMM-allocated pages must first be freed via TMM.freePage(), not directly in allocator free()";
+      "TMM-allocated pages must first be freed via TMM.freePage(), not directly in allocator " +
+        "free()";
 
     final long size = memory.size();
     if (MemoryAllocator.MEMORY_DEBUG_FILL_ENABLED) {
```
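Splitting the long string literal is safe here: javac folds `+` on compile-time constant strings into a single interned literal, so the assertion message is unchanged at runtime. A minimal sketch (class name and message text are illustrative, not from the patch):

```java
public class ConstantFoldingExample {
  // Both fields hold the same interned literal; the '+' on adjacent constant
  // strings is folded by the compiler and has no runtime cost.
  static final String ONE_LINE = "pages must be freed via TMM.freePage(), not allocator free()";
  static final String WRAPPED = "pages must be freed via TMM.freePage(), " +
      "not allocator free()";

  public static void main(String[] args) {
    System.out.println(ONE_LINE == WRAPPED); // true: constant folding plus interning
  }
}
```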
src/main/java/org/apache/spark/launcher/InProcessAppHandle.java:

```diff
@@ -17,7 +17,6 @@
 
 package org.apache.spark.launcher;
 
-import java.io.IOException;
 import java.lang.reflect.Method;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.logging.Level;
```
src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnVector.java:

```diff
@@ -38,7 +38,7 @@ public class OrcColumnVector extends org.apache.spark.sql.vectorized.ColumnVector {
   private BytesColumnVector bytesData;
   private DecimalColumnVector decimalData;
   private TimestampColumnVector timestampData;
-  final private boolean isTimestamp;
+  private final boolean isTimestamp;
 
   private int batchSize;
 
```
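The `ModifierOrder` check enforces the ordering recommended by the Java Language Specification: `public protected private abstract default static final transient volatile synchronized native strictfp`. A small sketch of conforming declarations (names are illustrative):

```java
public class ModifierOrderExample {
  // Access modifier first, then 'static', then 'final', per the JLS ordering.
  private static final int MAX_BATCH = 1024;  // OK
  private final boolean isTimestamp = false;  // OK: the form this patch switches to
  // final private boolean isTimestamp;       // flagged: 'private' out of order
}
```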
src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java:

```diff
@@ -461,7 +461,8 @@ public class JavaDataFrameSuite {
   public void testUDF() {
     UserDefinedFunction foo = udf((Integer i, String s) -> i.toString() + s, DataTypes.StringType);
     Dataset<Row> df = spark.table("testData").select(foo.apply(col("key"), col("value")));
-    String[] result = df.collectAsList().stream().map(row -> row.getString(0)).toArray(String[]::new);
+    String[] result = df.collectAsList().stream().map(row -> row.getString(0))
+      .toArray(String[]::new);
     String[] expected = spark.table("testData").collectAsList().stream()
       .map(row -> row.get(0).toString() + row.getString(1)).toArray(String[]::new);
     Assert.assertArrayEquals(expected, result);
```
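This second `LineLength` fix wraps a fluent stream chain before the method call instead of splitting a literal; breaking before the `.` is behavior-preserving. A self-contained sketch of the same pattern (names and data are illustrative):

```java
import java.util.Arrays;
import java.util.List;

public class ChainWrapExample {
  public static void main(String[] args) {
    List<String> rows = Arrays.asList("1hello", "2world");
    // Breaking the chain before '.toArray' keeps each line within the
    // 100-character limit; the stream pipeline itself is unchanged.
    String[] result = rows.stream().map(row -> row.substring(1))
        .toArray(String[]::new);
    System.out.println(Arrays.toString(result)); // [hello, world]
  }
}
```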