[SPARK-19227][SPARK-19251] remove unused imports and outdated comments
## What changes were proposed in this pull request?

Remove unused imports and outdated comments, and fix some minor code style issues.

## How was this patch tested?

Existing unit tests.

Author: uncleGen <hustyugm@gmail.com>

Closes #16591 from uncleGen/SPARK-19227.
This commit is contained in:
parent 4494cd9716
commit eefdf9f9dd
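Most of the diff below applies two mechanical patterns: dropping redundant braces around a single imported name (e.g. `java.net.{URI}` becomes `java.net.URI`) and pruning imports that nothing in the file references. A minimal sketch of both patterns follows; `ImportHygieneExample` and its contents are hypothetical illustrations, not code from this PR:

```scala
// Hypothetical file, for illustration only (not part of the PR).

// Before the cleanup this would have read:
//   import java.net.{URI}                // braces are redundant for a single name
//   import java.io.{File, IOException}   // IOException is never referenced

// After: single-name imports written without braces, unused names pruned.
import java.io.File
import java.net.URI

object ImportHygieneExample {
  // URI and File are still referenced below, so their imports stay.
  def toFile(uri: URI): File = new File(uri)
}
```

Unused imports accumulate easily because scalac only reports them when a flag such as `-Ywarn-unused-import` is enabled, so cleanups like this one are typically done by hand during review.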
@@ -26,11 +26,9 @@ import javax.net.ssl._

import com.google.common.hash.HashCodes
import com.google.common.io.Files
import org.apache.hadoop.io.Text
import org.apache.hadoop.security.Credentials

import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config._
import org.apache.spark.network.sasl.SecretKeyHolder
import org.apache.spark.util.Utils
@@ -19,7 +19,7 @@ package org.apache.spark

import java.io._
import java.lang.reflect.Constructor
-import java.net.{URI}
+import java.net.URI
import java.util.{Arrays, Locale, Properties, ServiceLoader, UUID}
import java.util.concurrent.{ConcurrentHashMap, ConcurrentMap}
import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger, AtomicReference}
@@ -19,7 +19,7 @@ package org.apache.spark.deploy

import javax.annotation.concurrent.ThreadSafe

-import com.codahale.metrics.{Gauge, MetricRegistry}
+import com.codahale.metrics.MetricRegistry

import org.apache.spark.metrics.source.Source
import org.apache.spark.network.shuffle.ExternalShuffleBlockHandler
@@ -17,7 +17,7 @@

package org.apache.spark.deploy

-import java.io.{File, IOException, PrintStream}
+import java.io.{File, IOException}
import java.lang.reflect.{InvocationTargetException, Modifier, UndeclaredThrowableException}
import java.net.URL
import java.security.PrivilegedExceptionAction
@@ -22,8 +22,6 @@ import javax.servlet.http.HttpServletRequest

import scala.xml.{Node, Unparsed}

-import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache}
-
import org.apache.spark.internal.Logging
import org.apache.spark.ui.{UIUtils, WebUIPage}
import org.apache.spark.util.Utils
@@ -17,12 +17,6 @@

package org.apache.spark.internal.config

-import java.util.{Map => JMap}
-
-import scala.util.matching.Regex
-
-import org.apache.spark.SparkConf
-
/**
 * An entry contains all meta information for a configuration.
 *
@@ -34,7 +28,6 @@ import org.apache.spark.SparkConf
 * value declared as a string.
 *
 * @param key the key for the configuration
- * @param defaultValue the default value for the configuration
 * @param valueConverter how to convert a string to the value. It should throw an exception if the
 *                       string does not have the required format.
 * @param stringConverter how to convert a value to a string that the user can use it as a valid
@@ -76,7 +69,7 @@ private class ConfigEntryWithDefault[T] (
    stringConverter: T => String,
    doc: String,
    isPublic: Boolean)
-    extends ConfigEntry(key, valueConverter, stringConverter, doc, isPublic) {
+  extends ConfigEntry(key, valueConverter, stringConverter, doc, isPublic) {

  override def defaultValue: Option[T] = Some(_defaultValue)
@@ -95,7 +88,7 @@ private class ConfigEntryWithDefaultString[T] (
    stringConverter: T => String,
    doc: String,
    isPublic: Boolean)
-    extends ConfigEntry(key, valueConverter, stringConverter, doc, isPublic) {
+  extends ConfigEntry(key, valueConverter, stringConverter, doc, isPublic) {

  override def defaultValue: Option[T] = Some(valueConverter(_defaultValue))
@@ -118,8 +111,8 @@ private[spark] class OptionalConfigEntry[T](
    val rawStringConverter: T => String,
    doc: String,
    isPublic: Boolean)
-    extends ConfigEntry[Option[T]](key, s => Some(rawValueConverter(s)),
-      v => v.map(rawStringConverter).orNull, doc, isPublic) {
+  extends ConfigEntry[Option[T]](key, s => Some(rawValueConverter(s)),
+    v => v.map(rawStringConverter).orNull, doc, isPublic) {

  override def defaultValueString: String = "<undefined>"
@@ -137,7 +130,7 @@ private class FallbackConfigEntry[T] (
    doc: String,
    isPublic: Boolean,
    private[config] val fallback: ConfigEntry[T])
-    extends ConfigEntry[T](key, fallback.valueConverter, fallback.stringConverter, doc, isPublic) {
+  extends ConfigEntry[T](key, fallback.valueConverter, fallback.stringConverter, doc, isPublic) {

  override def defaultValueString: String = s"<value of ${fallback.key}>"
@@ -18,7 +18,6 @@
package org.apache.spark.internal.config

import java.util.{Map => JMap}
import java.util.regex.Pattern

import scala.collection.mutable.HashMap
import scala.util.matching.Regex
@@ -19,11 +19,10 @@ package org.apache.spark.rpc

import java.util.concurrent.TimeoutException

-import scala.concurrent.{Await, Future}
+import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.control.NonFatal

-import org.apache.spark.{SparkConf, SparkException}
+import org.apache.spark.SparkConf
import org.apache.spark.util.{ThreadUtils, Utils}

/**
@@ -24,7 +24,6 @@ import java.util.Properties

import org.apache.spark._
import org.apache.spark.broadcast.Broadcast
-import org.apache.spark.executor.TaskMetrics
import org.apache.spark.rdd.RDD

/**
@@ -25,7 +25,6 @@ import scala.language.existentials

import org.apache.spark._
import org.apache.spark.broadcast.Broadcast
-import org.apache.spark.executor.TaskMetrics
import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD
import org.apache.spark.shuffle.ShuffleWriter
@@ -19,7 +19,6 @@ package org.apache.spark.scheduler

import scala.collection.mutable.HashSet

import org.apache.spark._
-import org.apache.spark.executor.TaskMetrics
import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD
@@ -46,7 +46,6 @@ import org.apache.spark.util._
 * @param stageId id of the stage this task belongs to
 * @param stageAttemptId attempt id of the stage this task belongs to
 * @param partitionId index of the number in the RDD
- * @param metrics a `TaskMetrics` that is created at driver side and sent to executor side.
 * @param localProperties copy of thread-local properties set by the user on the driver side.
 * @param serializedTaskMetrics a `TaskMetrics` that is created and serialized on the driver side
 *                              and sent to executor side.
@@ -23,7 +23,6 @@ import javax.annotation.concurrent.NotThreadSafe

import scala.reflect.ClassTag

import org.apache.spark.SparkEnv
import org.apache.spark.annotation.{DeveloperApi, Private}
import org.apache.spark.util.NextIterator
@@ -23,7 +23,6 @@ import java.nio.ByteBuffer

import scala.reflect.ClassTag

import org.apache.spark.SparkConf
import org.apache.spark.internal.config._
import org.apache.spark.io.CompressionCodec
import org.apache.spark.security.CryptoStreamUtils
import org.apache.spark.storage._
@@ -16,7 +16,7 @@
 */
package org.apache.spark.status.api.v1

-import javax.ws.rs.{GET, PathParam, Produces}
+import javax.ws.rs.{GET, Produces}
import javax.ws.rs.core.MediaType

import org.apache.spark.ui.SparkUI
@@ -31,7 +31,7 @@ import org.apache.spark.util.Utils
 * ExternalBlockStore, whether to keep the data in memory in a serialized format, and whether
 * to replicate the RDD partitions on multiple nodes.
 *
- * The [[org.apache.spark.storage.StorageLevel$]] singleton object contains some static constants
+ * The [[org.apache.spark.storage.StorageLevel]] singleton object contains some static constants
 * for commonly useful storage levels. To create your own storage level object, use the
 * factory method of the singleton object (`StorageLevel(...)`).
 */
@@ -19,7 +19,6 @@ package org.apache.spark.util.random

import java.util.Random

-import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag

import org.apache.commons.math3.distribution.PoissonDistribution
@@ -17,7 +17,6 @@

package org.apache.spark

import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer

import org.apache.spark.executor.TaskMetrics
@@ -23,7 +23,6 @@ import org.mockito.Mockito.mock
import org.scalatest._

import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.internal.config._
import org.apache.spark.network.BlockDataManager

class NettyBlockTransferServiceSuite
@@ -21,10 +21,6 @@ import org.scalatest.Matchers

import org.apache.spark.SparkFunSuite

-/**
- *
- */
-
class DistributionSuite extends SparkFunSuite with Matchers {
  test("summary") {
    val d = new Distribution((1 to 100).toArray.map{_.toDouble})
@@ -21,7 +21,7 @@ package org.apache.spark.examples.ml
// $example on$
import org.apache.spark.ml.feature.Binarizer
// $example off$
-import org.apache.spark.sql.{SparkSession}
+import org.apache.spark.sql.SparkSession

object BinarizerExample {
  def main(args: Array[String]): Unit = {
@@ -16,10 +16,6 @@
 */
package org.apache.spark.examples.sql

-// $example on:schema_inferring$
-import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
-import org.apache.spark.sql.Encoder
-// $example off:schema_inferring$
import org.apache.spark.sql.Row
// $example on:init_session$
import org.apache.spark.sql.SparkSession
@@ -18,7 +18,6 @@
// scalastyle:off println
package org.apache.spark.examples.sql.streaming

-import org.apache.spark.sql.functions._
import org.apache.spark.sql.SparkSession

/**
@@ -24,7 +24,7 @@ import java.nio.charset.StandardCharsets
import scala.collection.JavaConverters._
import scala.util.control.NonFatal

-import org.apache.kafka.clients.consumer.{Consumer, ConsumerConfig, KafkaConsumer, OffsetOutOfRangeException}
+import org.apache.kafka.clients.consumer.{Consumer, ConsumerConfig, KafkaConsumer}
import org.apache.kafka.clients.consumer.internals.NoOpConsumerRebalanceListener
import org.apache.kafka.common.TopicPartition
@@ -24,7 +24,7 @@ import scala.collection.mutable.ArrayBuffer
import org.apache.kafka.clients.consumer.{ ConsumerConfig, ConsumerRecord }
import org.apache.kafka.common.TopicPartition

-import org.apache.spark.{Partition, SparkContext, SparkException, TaskContext}
+import org.apache.spark.{Partition, SparkContext, TaskContext}
import org.apache.spark.internal.Logging
import org.apache.spark.partial.{BoundedDouble, PartialResult}
import org.apache.spark.rdd.RDD
@@ -25,7 +25,6 @@ import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkContext
import org.apache.spark.annotation.Experimental
import org.apache.spark.api.java.{ JavaRDD, JavaSparkContext }
import org.apache.spark.api.java.function.{ Function0 => JFunction0 }
import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.StreamingContext
@@ -22,7 +22,7 @@ import java.nio.ByteBuffer

import scala.util.Random

-import com.amazonaws.auth.{BasicAWSCredentials, DefaultAWSCredentialsProviderChain}
+import com.amazonaws.auth.DefaultAWSCredentialsProviderChain
import com.amazonaws.regions.RegionUtils
import com.amazonaws.services.kinesis.AmazonKinesisClient
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.InitialPositionInStream
@@ -26,7 +26,7 @@ import com.amazonaws.services.kinesis.clientlibrary.types.ShutdownReason
import org.apache.spark.internal.Logging
import org.apache.spark.streaming.Duration
import org.apache.spark.streaming.util.RecurringTimer
-import org.apache.spark.util.{Clock, SystemClock, ThreadUtils}
+import org.apache.spark.util.{Clock, SystemClock}

/**
 * This is a helper class for managing Kinesis checkpointing.
@@ -17,7 +17,7 @@

package org.apache.spark.streaming.kinesis

-import java.util.concurrent.{ExecutorService, TimeoutException}
+import java.util.concurrent.TimeoutException

import scala.concurrent.{Await, ExecutionContext, Future}
import scala.concurrent.duration._
@@ -30,7 +30,6 @@ import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.scalatest.{BeforeAndAfterEach, PrivateMethodTester}
import org.scalatest.concurrent.Eventually
import org.scalatest.concurrent.Eventually._
import org.scalatest.mock.MockitoSugar

import org.apache.spark.streaming.{Duration, TestSuiteBase}
@@ -23,11 +23,8 @@ import java.util.List;

import org.junit.Before;
import org.junit.Test;
-import static org.junit.Assert.*;
import static org.mockito.Mockito.*;

-import static org.apache.spark.launcher.SparkSubmitOptionParser.*;
-
public class SparkSubmitOptionParserSuite extends BaseSuite {

  private SparkSubmitOptionParser parser;
@@ -27,7 +27,7 @@ import org.apache.spark.ml.util.{MetadataUtils, SchemaUtils}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, Row}
import org.apache.spark.sql.functions._
-import org.apache.spark.sql.types.{DataType, DoubleType, StructType}
+import org.apache.spark.sql.types.{DataType, StructType}

/**
 * (private[spark]) Params for classification.
@@ -37,7 +37,6 @@ import org.apache.spark.mllib.tree.model.{GradientBoostedTreesModel => OldGBTModel}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, Row}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.DoubleType

/**
 * Gradient-Boosted Trees (GBTs) (http://en.wikipedia.org/wiki/Gradient_boosting)
@@ -25,7 +25,7 @@ import org.apache.hadoop.mapreduce.{Job, TaskAttemptContext}

import org.apache.spark.TaskContext
import org.apache.spark.ml.feature.LabeledPoint
-import org.apache.spark.ml.linalg.{Vector, Vectors, VectorUDT}
+import org.apache.spark.ml.linalg.{Vectors, VectorUDT}
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.catalyst.InternalRow
@@ -17,8 +17,8 @@

package org.apache.spark.repl

-import scala.tools.nsc.{Settings, CompilerCommand}
-import scala.Predef._
+import scala.tools.nsc.{CompilerCommand, Settings}

import org.apache.spark.annotation.DeveloperApi

/**
@@ -10,8 +10,6 @@ package org.apache.spark.repl
import scala.tools.nsc._
import scala.tools.nsc.interpreter._

-import scala.reflect.internal.util.Position
import scala.util.control.Exception.ignoring
-import scala.tools.nsc.util.stackTraceString

import org.apache.spark.SPARK_VERSION
@@ -19,8 +19,6 @@ package org.apache.spark.deploy.yarn

import scala.collection.mutable.ArrayBuffer

-import org.apache.spark.util.{IntParam, MemoryParam}
-
class ApplicationMasterArguments(val args: Array[String]) {
  var userJar: String = null
  var userClass: String = null
@@ -18,10 +18,9 @@
package org.apache.spark.deploy.yarn.security

import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.Path
import org.scalatest.{Matchers, PrivateMethodTester}

-import org.apache.spark.{SparkConf, SparkException, SparkFunSuite}
+import org.apache.spark.{SparkException, SparkFunSuite}

class HadoopFSCredentialProviderSuite
    extends SparkFunSuite
@@ -17,7 +17,6 @@

package org.apache.spark.sql.catalyst.expressions

import org.apache.spark.TaskContext
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode}
import org.apache.spark.sql.types.{DataType, LongType}
@@ -21,7 +21,6 @@ import java.nio.ByteBuffer

import com.google.common.primitives.{Doubles, Ints, Longs}

import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
@@ -20,7 +20,6 @@ package org.apache.spark.sql.catalyst.expressions.aggregate
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}
import java.util

import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
@@ -17,8 +17,6 @@

package org.apache.spark.sql.catalyst.expressions.aggregate

-import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}
-
import scala.collection.generic.Growable
import scala.collection.mutable
@@ -23,7 +23,6 @@ import scala.util.parsing.combinator.RegexParsers

import com.fasterxml.jackson.core._

import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
import org.apache.spark.sql.catalyst.InternalRow
@@ -18,7 +18,7 @@
package org.apache.spark.sql.catalyst.expressions

import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, TypeCoercion}
+import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode}
import org.apache.spark.sql.catalyst.util.TypeUtils
import org.apache.spark.sql.types._
@@ -17,15 +17,11 @@

package org.apache.spark.sql.catalyst.planning

-import scala.annotation.tailrec
-import scala.collection.mutable

import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.expressions._
-import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical._
-import org.apache.spark.sql.types.IntegerType

/**
 * A pattern that matches any number of project or filter operations on top of another relational
@@ -17,7 +17,6 @@

package org.apache.spark.sql.catalyst.plans

import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
import org.apache.spark.sql.catalyst.expressions.Attribute

object JoinType {
@@ -17,7 +17,7 @@

package org.apache.spark.sql.catalyst.plans.logical

-import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression}
+import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.types.MetadataBuilder
import org.apache.spark.unsafe.types.CalendarInterval
@@ -36,7 +36,6 @@ import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.catalyst.expressions.{AttributeMap, AttributeReference, Expression}
import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, LogicalPlan, Statistics}
import org.apache.spark.sql.execution.FileRelation
import org.apache.spark.sql.hive.client.HiveClient
import org.apache.spark.sql.types.StructField