[SPARK-22066][BUILD] Update checkstyle to 8.2, enable it, fix violations
## What changes were proposed in this pull request?

Update plugins, including scala-maven-plugin, to latest versions. Update checkstyle to 8.2. Remove bogus checkstyle config and enable it. Fix existing and new Java checkstyle errors.

## How was this patch tested?

Existing tests

Author: Sean Owen <sowen@cloudera.com>

Closes #19282 from srowen/SPARK-22066.
This commit is contained in:
parent
280ff523f4
commit
3d4dd14cd5
|
@ -187,7 +187,7 @@
|
|||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-assembly-plugin</artifactId>
|
||||
<version>3.0.0</version>
|
||||
<version>3.1.0</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>dist</id>
|
||||
|
|
|
@ -17,12 +17,12 @@
|
|||
package org.apache.spark.network.util;
|
||||
|
||||
public enum ByteUnit {
|
||||
BYTE (1),
|
||||
KiB (1024L),
|
||||
MiB ((long) Math.pow(1024L, 2L)),
|
||||
GiB ((long) Math.pow(1024L, 3L)),
|
||||
TiB ((long) Math.pow(1024L, 4L)),
|
||||
PiB ((long) Math.pow(1024L, 5L));
|
||||
BYTE(1),
|
||||
KiB(1024L),
|
||||
MiB((long) Math.pow(1024L, 2L)),
|
||||
GiB((long) Math.pow(1024L, 3L)),
|
||||
TiB((long) Math.pow(1024L, 4L)),
|
||||
PiB((long) Math.pow(1024L, 5L));
|
||||
|
||||
ByteUnit(long multiplier) {
|
||||
this.multiplier = multiplier;
|
||||
|
|
|
@ -44,7 +44,7 @@ public class NettyMemoryMetrics implements MetricSet {
|
|||
private final String metricPrefix;
|
||||
|
||||
@VisibleForTesting
|
||||
final static Set<String> VERBOSE_METRICS = new HashSet<>();
|
||||
static final Set<String> VERBOSE_METRICS = new HashSet<>();
|
||||
static {
|
||||
VERBOSE_METRICS.addAll(Arrays.asList(
|
||||
"numAllocations",
|
||||
|
|
|
@ -56,20 +56,17 @@
|
|||
<build>
|
||||
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
|
||||
<testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
|
||||
<pluginManagement>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>net.alchim31.maven</groupId>
|
||||
<artifactId>scala-maven-plugin</artifactId>
|
||||
<version>3.2.2</version>
|
||||
<configuration>
|
||||
<javacArgs combine.children="append">
|
||||
<!-- This option is needed to suppress warnings from sun.misc.Unsafe usage -->
|
||||
<javacArg>-XDignore.symbol.file</javacArg>
|
||||
</javacArgs>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</pluginManagement>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>net.alchim31.maven</groupId>
|
||||
<artifactId>scala-maven-plugin</artifactId>
|
||||
<configuration>
|
||||
<javacArgs combine.children="append">
|
||||
<!-- This option is needed to suppress warnings from sun.misc.Unsafe usage -->
|
||||
<javacArg>-XDignore.symbol.file</javacArg>
|
||||
</javacArgs>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</project>
|
||||
|
|
|
@ -93,20 +93,17 @@
|
|||
<build>
|
||||
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
|
||||
<testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
|
||||
<pluginManagement>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>net.alchim31.maven</groupId>
|
||||
<artifactId>scala-maven-plugin</artifactId>
|
||||
<version>3.2.2</version>
|
||||
<configuration>
|
||||
<javacArgs combine.children="append">
|
||||
<!-- This option is needed to suppress warnings from sun.misc.Unsafe usage -->
|
||||
<javacArg>-XDignore.symbol.file</javacArg>
|
||||
</javacArgs>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</pluginManagement>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>net.alchim31.maven</groupId>
|
||||
<artifactId>scala-maven-plugin</artifactId>
|
||||
<configuration>
|
||||
<javacArgs combine.children="append">
|
||||
<!-- This option is needed to suppress warnings from sun.misc.Unsafe usage -->
|
||||
<javacArg>-XDignore.symbol.file</javacArg>
|
||||
</javacArgs>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</project>
|
||||
|
|
|
@ -513,9 +513,9 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
|
|||
|
||||
/**
|
||||
* Based on the given trim string, trim this string starting from both ends
|
||||
* This method searches for each character in the source string, removes the character if it is found
|
||||
* in the trim string, stops at the first not found. It calls the trimLeft first, then trimRight.
|
||||
* It returns a new string in which both ends trim characters have been removed.
|
||||
* This method searches for each character in the source string, removes the character if it is
|
||||
* found in the trim string, stops at the first not found. It calls the trimLeft first, then
|
||||
* trimRight. It returns a new string in which both ends trim characters have been removed.
|
||||
* @param trimString the trim character string
|
||||
*/
|
||||
public UTF8String trim(UTF8String trimString) {
|
||||
|
@ -540,8 +540,9 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
|
|||
|
||||
/**
|
||||
* Based on the given trim string, trim this string starting from left end
|
||||
* This method searches each character in the source string starting from the left end, removes the character if it
|
||||
* is in the trim string, stops at the first character which is not in the trim string, returns the new string.
|
||||
* This method searches each character in the source string starting from the left end, removes
|
||||
* the character if it is in the trim string, stops at the first character which is not in the
|
||||
* trim string, returns the new string.
|
||||
* @param trimString the trim character string
|
||||
*/
|
||||
public UTF8String trimLeft(UTF8String trimString) {
|
||||
|
@ -552,7 +553,8 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
|
|||
int trimIdx = 0;
|
||||
|
||||
while (srchIdx < numBytes) {
|
||||
UTF8String searchChar = copyUTF8String(srchIdx, srchIdx + numBytesForFirstByte(this.getByte(srchIdx)) - 1);
|
||||
UTF8String searchChar = copyUTF8String(
|
||||
srchIdx, srchIdx + numBytesForFirstByte(this.getByte(srchIdx)) - 1);
|
||||
int searchCharBytes = searchChar.numBytes;
|
||||
// try to find the matching for the searchChar in the trimString set
|
||||
if (trimString.find(searchChar, 0) >= 0) {
|
||||
|
@ -587,8 +589,9 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
|
|||
|
||||
/**
|
||||
* Based on the given trim string, trim this string starting from right end
|
||||
* This method searches each character in the source string starting from the right end, removes the character if it
|
||||
* is in the trim string, stops at the first character which is not in the trim string, returns the new string.
|
||||
* This method searches each character in the source string starting from the right end,
|
||||
* removes the character if it is in the trim string, stops at the first character which is not
|
||||
* in the trim string, returns the new string.
|
||||
* @param trimString the trim character string
|
||||
*/
|
||||
public UTF8String trimRight(UTF8String trimString) {
|
||||
|
@ -608,11 +611,13 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
|
|||
numChars ++;
|
||||
}
|
||||
|
||||
// index trimEnd points to the first no matching byte position from the right side of the source string.
|
||||
// index trimEnd points to the first no matching byte position from the right side of
|
||||
// the source string.
|
||||
int trimEnd = numBytes - 1;
|
||||
while (numChars > 0) {
|
||||
UTF8String searchChar =
|
||||
copyUTF8String(stringCharPos[numChars - 1], stringCharPos[numChars - 1] + stringCharLen[numChars - 1] - 1);
|
||||
UTF8String searchChar = copyUTF8String(
|
||||
stringCharPos[numChars - 1],
|
||||
stringCharPos[numChars - 1] + stringCharLen[numChars - 1] - 1);
|
||||
if (trimString.find(searchChar, 0) >= 0) {
|
||||
trimEnd -= stringCharLen[numChars - 1];
|
||||
} else {
|
||||
|
|
|
@ -777,7 +777,8 @@ public class UTF8StringSuite {
|
|||
assertEquals(fromString("cc"), fromString("ccbaaaa").trimRight(fromString("ba")));
|
||||
assertEquals(fromString(""), fromString("aabbbbaaa").trimRight(fromString("ab")));
|
||||
assertEquals(fromString(" he"), fromString(" hello ").trimRight(fromString(" ol")));
|
||||
assertEquals(fromString("oohell"), fromString("oohellooo../*&").trimRight(fromString("./,&%*o")));
|
||||
assertEquals(fromString("oohell"),
|
||||
fromString("oohellooo../*&").trimRight(fromString("./,&%*o")));
|
||||
|
||||
assertEquals(EMPTY_UTF8, fromString(" ").trimRight(fromString(" ")));
|
||||
|
||||
|
|
|
@ -483,6 +483,7 @@
|
|||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>exec-maven-plugin</artifactId>
|
||||
<version>1.6.0</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>sparkr-pkg</id>
|
||||
|
|
|
@ -32,11 +32,12 @@ import java.util.concurrent.locks.ReentrantLock;
|
|||
|
||||
/**
|
||||
* {@link InputStream} implementation which asynchronously reads ahead from the underlying input
|
||||
* stream when specified amount of data has been read from the current buffer. It does it by maintaining
|
||||
* two buffer - active buffer and read ahead buffer. Active buffer contains data which should be returned
|
||||
* when a read() call is issued. The read ahead buffer is used to asynchronously read from the underlying
|
||||
* input stream and once the current active buffer is exhausted, we flip the two buffers so that we can
|
||||
* start reading from the read ahead buffer without being blocked in disk I/O.
|
||||
* stream when specified amount of data has been read from the current buffer. It does it by
|
||||
* maintaining two buffers - active buffer and read ahead buffer. Active buffer contains data
|
||||
* which should be returned when a read() call is issued. The read ahead buffer is used to
|
||||
* asynchronously read from the underlying input stream and once the current active buffer is
|
||||
* exhausted, we flip the two buffers so that we can start reading from the read ahead buffer
|
||||
* without being blocked in disk I/O.
|
||||
*/
|
||||
public class ReadAheadInputStream extends InputStream {
|
||||
|
||||
|
@ -83,7 +84,8 @@ public class ReadAheadInputStream extends InputStream {
|
|||
|
||||
private final InputStream underlyingInputStream;
|
||||
|
||||
private final ExecutorService executorService = ThreadUtils.newDaemonSingleThreadExecutor("read-ahead");
|
||||
private final ExecutorService executorService =
|
||||
ThreadUtils.newDaemonSingleThreadExecutor("read-ahead");
|
||||
|
||||
private final Condition asyncReadComplete = stateChangeLock.newCondition();
|
||||
|
||||
|
@ -98,13 +100,14 @@ public class ReadAheadInputStream extends InputStream {
|
|||
* @param readAheadThresholdInBytes If the active buffer has less data than the read-ahead
|
||||
* threshold, an async read is triggered.
|
||||
*/
|
||||
public ReadAheadInputStream(InputStream inputStream, int bufferSizeInBytes, int readAheadThresholdInBytes) {
|
||||
public ReadAheadInputStream(
|
||||
InputStream inputStream, int bufferSizeInBytes, int readAheadThresholdInBytes) {
|
||||
Preconditions.checkArgument(bufferSizeInBytes > 0,
|
||||
"bufferSizeInBytes should be greater than 0, but the value is " + bufferSizeInBytes);
|
||||
Preconditions.checkArgument(readAheadThresholdInBytes > 0 &&
|
||||
readAheadThresholdInBytes < bufferSizeInBytes,
|
||||
"readAheadThresholdInBytes should be greater than 0 and less than bufferSizeInBytes, but the" +
|
||||
"value is " + readAheadThresholdInBytes);
|
||||
"readAheadThresholdInBytes should be greater than 0 and less than bufferSizeInBytes, " +
|
||||
"but the value is " + readAheadThresholdInBytes);
|
||||
activeBuffer = ByteBuffer.allocate(bufferSizeInBytes);
|
||||
readAheadBuffer = ByteBuffer.allocate(bufferSizeInBytes);
|
||||
this.readAheadThresholdInBytes = readAheadThresholdInBytes;
|
||||
|
|
|
@ -76,9 +76,8 @@ public final class UnsafeSorterSpillReader extends UnsafeSorterIterator implemen
|
|||
SparkEnv.get() == null ? 0.5 :
|
||||
SparkEnv.get().conf().getDouble("spark.unsafe.sorter.spill.read.ahead.fraction", 0.5);
|
||||
|
||||
final boolean readAheadEnabled =
|
||||
SparkEnv.get() == null ? false :
|
||||
SparkEnv.get().conf().getBoolean("spark.unsafe.sorter.spill.read.ahead.enabled", true);
|
||||
final boolean readAheadEnabled = SparkEnv.get() != null &&
|
||||
SparkEnv.get().conf().getBoolean("spark.unsafe.sorter.spill.read.ahead.enabled", true);
|
||||
|
||||
final InputStream bs =
|
||||
new NioBufferedFileInputStream(file, (int) bufferSizeBytes);
|
||||
|
|
|
@ -28,6 +28,7 @@ public class ReadAheadInputStreamSuite extends GenericFileInputStreamSuite {
|
|||
@Before
|
||||
public void setUp() throws IOException {
|
||||
super.setUp();
|
||||
inputStream = new ReadAheadInputStream(new NioBufferedFileInputStream(inputFile), 8 * 1024, 4 * 1024);
|
||||
inputStream = new ReadAheadInputStream(
|
||||
new NioBufferedFileInputStream(inputFile), 8 * 1024, 4 * 1024);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -52,20 +52,6 @@
|
|||
<property name="file" value="dev/checkstyle-suppressions.xml"/>
|
||||
</module>
|
||||
|
||||
<!--
|
||||
If you wish to turn off checking for a section of code, you can put a comment in the source
|
||||
before and after the section, with the following syntax:
|
||||
|
||||
// checkstyle:off no.XXX (such as checkstyle.off: NoFinalizer)
|
||||
... // stuff that breaks the styles
|
||||
// checkstyle:on
|
||||
-->
|
||||
<module name="SuppressionCommentFilter">
|
||||
<property name="offCommentFormat" value="checkstyle.off\: ([\w\|]+)"/>
|
||||
<property name="onCommentFormat" value="checkstyle.on\: ([\w\|]+)"/>
|
||||
<property name="checkFormat" value="$1"/>
|
||||
</module>
|
||||
|
||||
<!-- Checks for whitespace -->
|
||||
<!-- See http://checkstyle.sf.net/config_whitespace.html -->
|
||||
<module name="FileTabCharacter">
|
||||
|
@ -81,6 +67,19 @@
|
|||
<module name="NewlineAtEndOfFile"/>
|
||||
|
||||
<module name="TreeWalker">
|
||||
<!--
|
||||
If you wish to turn off checking for a section of code, you can put a comment in the source
|
||||
before and after the section, with the following syntax:
|
||||
|
||||
// checkstyle:off no.XXX (such as checkstyle.off: NoFinalizer)
|
||||
... // stuff that breaks the styles
|
||||
// checkstyle:on
|
||||
-->
|
||||
<module name="SuppressionCommentFilter">
|
||||
<property name="offCommentFormat" value="checkstyle.off\: ([\w\|]+)"/>
|
||||
<property name="onCommentFormat" value="checkstyle.on\: ([\w\|]+)"/>
|
||||
<property name="checkFormat" value="$1"/>
|
||||
</module>
|
||||
<module name="OuterTypeFilename"/>
|
||||
<module name="IllegalTokenText">
|
||||
<property name="tokens" value="STRING_LITERAL, CHAR_LITERAL"/>
|
||||
|
@ -182,6 +181,5 @@
|
|||
<module name="UnusedImports"/>
|
||||
<module name="RedundantImport"/>
|
||||
<module name="RedundantModifier"/>
|
||||
<module name="FileContentsHolder"/>
|
||||
</module>
|
||||
</module>
|
||||
|
|
|
@ -46,7 +46,7 @@ OLD_VERSION=$($MVN -q \
|
|||
-Dexec.executable="echo" \
|
||||
-Dexec.args='${project.version}' \
|
||||
--non-recursive \
|
||||
org.codehaus.mojo:exec-maven-plugin:1.5.0:exec)
|
||||
org.codehaus.mojo:exec-maven-plugin:1.6.0:exec)
|
||||
if [ $? != 0 ]; then
|
||||
echo -e "Error while getting version string from Maven:\n$OLD_VERSION"
|
||||
exit 1
|
||||
|
|
28
pom.xml
28
pom.xml
|
@ -1969,7 +1969,7 @@
|
|||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-enforcer-plugin</artifactId>
|
||||
<version>1.4.1</version>
|
||||
<version>3.0.0-M1</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>enforce-versions</id>
|
||||
|
@ -2012,7 +2012,7 @@
|
|||
<plugin>
|
||||
<groupId>net.alchim31.maven</groupId>
|
||||
<artifactId>scala-maven-plugin</artifactId>
|
||||
<version>3.2.2</version>
|
||||
<version>3.3.1</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>eclipse-add-source</id>
|
||||
|
@ -2061,7 +2061,7 @@
|
|||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
<version>3.6.1</version>
|
||||
<version>3.7.0</version>
|
||||
<configuration>
|
||||
<source>${java.version}</source>
|
||||
<target>${java.version}</target>
|
||||
|
@ -2078,7 +2078,7 @@
|
|||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<version>2.19.1</version>
|
||||
<version>2.20.1</version>
|
||||
<!-- Note config is repeated in scalatest config -->
|
||||
<configuration>
|
||||
<includes>
|
||||
|
@ -2222,7 +2222,7 @@
|
|||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-javadoc-plugin</artifactId>
|
||||
<version>2.10.4</version>
|
||||
<version>3.0.0-M1</version>
|
||||
<configuration>
|
||||
<additionalparam>-Xdoclint:all -Xdoclint:-missing</additionalparam>
|
||||
<tags>
|
||||
|
@ -2262,17 +2262,17 @@
|
|||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>exec-maven-plugin</artifactId>
|
||||
<version>1.5.0</version>
|
||||
<version>1.6.0</version>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-assembly-plugin</artifactId>
|
||||
<version>3.0.0</version>
|
||||
<version>3.1.0</version>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-shade-plugin</artifactId>
|
||||
<version>3.0.0</version>
|
||||
<version>3.1.0</version>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
|
@ -2287,7 +2287,7 @@
|
|||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-dependency-plugin</artifactId>
|
||||
<version>3.0.0</version>
|
||||
<version>3.0.2</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>default-cli</id>
|
||||
|
@ -2487,10 +2487,7 @@
|
|||
<artifactId>maven-checkstyle-plugin</artifactId>
|
||||
<version>2.17</version>
|
||||
<configuration>
|
||||
<verbose>false</verbose>
|
||||
<failOnViolation>false</failOnViolation>
|
||||
<includeTestSourceDirectory>true</includeTestSourceDirectory>
|
||||
<failOnWarning>false</failOnWarning>
|
||||
<sourceDirectories>${basedir}/src/main/java,${basedir}/src/main/scala</sourceDirectories>
|
||||
<testSourceDirectory>${basedir}/src/test/java</testSourceDirectory>
|
||||
<configLocation>dev/checkstyle.xml</configLocation>
|
||||
|
@ -2498,6 +2495,13 @@
|
|||
<inputEncoding>${project.build.sourceEncoding}</inputEncoding>
|
||||
<outputEncoding>${project.reporting.outputEncoding}</outputEncoding>
|
||||
</configuration>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>com.puppycrawl.tools</groupId>
|
||||
<artifactId>checkstyle</artifactId>
|
||||
<version>8.2</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
<executions>
|
||||
<execution>
|
||||
<goals>
|
||||
|
|
|
@ -21,15 +21,14 @@ import java.util.List;
|
|||
|
||||
import org.apache.spark.annotation.InterfaceStability;
|
||||
import org.apache.spark.sql.Row;
|
||||
import org.apache.spark.sql.sources.v2.DataSourceV2Options;
|
||||
import org.apache.spark.sql.sources.v2.ReadSupport;
|
||||
import org.apache.spark.sql.sources.v2.ReadSupportWithSchema;
|
||||
import org.apache.spark.sql.types.StructType;
|
||||
|
||||
/**
|
||||
* A data source reader that is returned by
|
||||
* {@link ReadSupport#createReader(DataSourceV2Options)} or
|
||||
* {@link ReadSupportWithSchema#createReader(StructType, DataSourceV2Options)}.
|
||||
* {@link org.apache.spark.sql.sources.v2.ReadSupport#createReader(
|
||||
* org.apache.spark.sql.sources.v2.DataSourceV2Options)} or
|
||||
* {@link org.apache.spark.sql.sources.v2.ReadSupportWithSchema#createReader(
|
||||
* StructType, org.apache.spark.sql.sources.v2.DataSourceV2Options)}.
|
||||
* It can mix in various query optimization interfaces to speed up the data scan. The actual scan
|
||||
* logic should be delegated to {@link ReadTask}s that are returned by {@link #createReadTasks()}.
|
||||
*
|
||||
|
|
|
@ -23,8 +23,6 @@ import org.apache.spark.annotation.Experimental;
|
|||
import org.apache.spark.annotation.InterfaceStability;
|
||||
import org.apache.spark.sql.Row;
|
||||
import org.apache.spark.sql.catalyst.expressions.UnsafeRow;
|
||||
import org.apache.spark.sql.sources.v2.reader.DataSourceV2Reader;
|
||||
import org.apache.spark.sql.sources.v2.reader.ReadTask;
|
||||
|
||||
/**
|
||||
* A mix-in interface for {@link DataSourceV2Reader}. Data source readers can implement this
|
||||
|
@ -39,7 +37,8 @@ public interface SupportsScanUnsafeRow extends DataSourceV2Reader {
|
|||
|
||||
@Override
|
||||
default List<ReadTask<Row>> createReadTasks() {
|
||||
throw new IllegalStateException("createReadTasks should not be called with SupportsScanUnsafeRow.");
|
||||
throw new IllegalStateException(
|
||||
"createReadTasks should not be called with SupportsScanUnsafeRow.");
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -32,7 +32,9 @@ import org.apache.spark.sql.types.StructType;
|
|||
|
||||
public class JavaAdvancedDataSourceV2 implements DataSourceV2, ReadSupport {
|
||||
|
||||
class Reader implements DataSourceV2Reader, SupportsPushDownRequiredColumns, SupportsPushDownFilters {
|
||||
class Reader implements DataSourceV2Reader, SupportsPushDownRequiredColumns,
|
||||
SupportsPushDownFilters {
|
||||
|
||||
private StructType requiredSchema = new StructType().add("i", "int").add("j", "int");
|
||||
private Filter[] filters = new Filter[0];
|
||||
|
||||
|
|
Loading…
Reference in a new issue