[SPARK-18985][SS] Add missing @InterfaceStability.Evolving for Structured Streaming APIs

## What changes were proposed in this pull request?

Add the missing `@InterfaceStability.Evolving` annotation to the Structured Streaming APIs.
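
For context, `InterfaceStability.Evolving` marks an API that is intended to stabilize eventually but may still change between minor releases. The pattern applied to every file in the diff below is sketched here; `MyStreamingApi` is a hypothetical trait used only for illustration, not one of the files this PR touches.

```scala
// A minimal sketch of the annotation pattern this PR applies.
// MyStreamingApi is hypothetical; the real targets are the public
// Structured Streaming classes changed in the diff below.
import org.apache.spark.annotation.{Experimental, InterfaceStability}

/**
 * :: Experimental ::
 * An evolving public API: usable today, but its interface may still
 * change between minor releases.
 */
@Experimental
@InterfaceStability.Evolving
trait MyStreamingApi {
  def name: String
}
```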

## How was this patch tested?

Compiling the code.

Author: Shixiong Zhu <shixiong@databricks.com>

Closes #16385 from zsxwing/SPARK-18985.
commit 2246ce88ae (parent ce99f51d2e)
Shixiong Zhu committed 2016-12-22 16:21:09 -08:00
9 changed files with 28 additions and 9 deletions

sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamReader.scala

@@ -19,7 +19,7 @@ package org.apache.spark.sql.streaming
 import scala.collection.JavaConverters._
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.{AnalysisException, DataFrame, Dataset, SparkSession}
 import org.apache.spark.sql.execution.datasources.DataSource
@@ -33,6 +33,7 @@ import org.apache.spark.sql.types.StructType
  * @since 2.0.0
  */
 @Experimental
+@InterfaceStability.Evolving
 final class DataStreamReader private[sql](sparkSession: SparkSession) extends Logging {
   /**
    * Specifies the input data source format.

sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamWriter.scala

@@ -19,7 +19,7 @@ package org.apache.spark.sql.streaming
 import scala.collection.JavaConverters._
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
 import org.apache.spark.sql.{AnalysisException, DataFrame, Dataset, ForeachWriter}
 import org.apache.spark.sql.catalyst.streaming.InternalOutputModes._
 import org.apache.spark.sql.execution.datasources.DataSource
@@ -33,6 +33,7 @@ import org.apache.spark.sql.execution.streaming.{ForeachSink, MemoryPlan, Memory
  * @since 2.0.0
  */
 @Experimental
+@InterfaceStability.Evolving
 final class DataStreamWriter[T] private[sql](ds: Dataset[T]) {
   private val df = ds.toDF()

sql/core/src/main/scala/org/apache/spark/sql/streaming/StreamingQuery.scala

@@ -19,7 +19,7 @@ package org.apache.spark.sql.streaming
 import java.util.UUID
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
 import org.apache.spark.sql.SparkSession
 /**
@@ -29,6 +29,7 @@ import org.apache.spark.sql.SparkSession
  * @since 2.0.0
  */
 @Experimental
+@InterfaceStability.Evolving
 trait StreamingQuery {
   /**

sql/core/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryException.scala

@@ -17,7 +17,7 @@
 package org.apache.spark.sql.streaming
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
 /**
  * :: Experimental ::
@@ -30,6 +30,7 @@ import org.apache.spark.annotation.Experimental
  * @since 2.0.0
  */
 @Experimental
+@InterfaceStability.Evolving
 class StreamingQueryException private[sql](
     private val queryDebugString: String,
     val message: String,

sql/core/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryListener.scala

@@ -19,7 +19,7 @@ package org.apache.spark.sql.streaming
 import java.util.UUID
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
 import org.apache.spark.scheduler.SparkListenerEvent
 /**
@@ -30,6 +30,7 @@ import org.apache.spark.scheduler.SparkListenerEvent
  * @since 2.0.0
  */
 @Experimental
+@InterfaceStability.Evolving
 abstract class StreamingQueryListener {
   import StreamingQueryListener._
@@ -70,6 +71,7 @@ abstract class StreamingQueryListener {
  * @since 2.0.0
  */
 @Experimental
+@InterfaceStability.Evolving
 object StreamingQueryListener {
   /**
@@ -78,6 +80,7 @@ object StreamingQueryListener {
    * @since 2.0.0
    */
   @Experimental
+  @InterfaceStability.Evolving
   trait Event extends SparkListenerEvent
   /**
@@ -89,6 +92,7 @@ object StreamingQueryListener {
    * @since 2.1.0
    */
   @Experimental
+  @InterfaceStability.Evolving
   class QueryStartedEvent private[sql](
       val id: UUID,
       val runId: UUID,
@@ -101,6 +105,7 @@ object StreamingQueryListener {
    * @since 2.1.0
    */
   @Experimental
+  @InterfaceStability.Evolving
   class QueryProgressEvent private[sql](val progress: StreamingQueryProgress) extends Event
   /**
@@ -114,6 +119,7 @@ object StreamingQueryListener {
    * @since 2.1.0
    */
   @Experimental
+  @InterfaceStability.Evolving
   class QueryTerminatedEvent private[sql](
       val id: UUID,
       val runId: UUID,

sql/core/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryManager.scala

@@ -24,7 +24,7 @@ import scala.collection.mutable
 import org.apache.hadoop.fs.Path
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
 import org.apache.spark.sql.{AnalysisException, DataFrame, SparkSession}
 import org.apache.spark.sql.catalyst.analysis.UnsupportedOperationChecker
 import org.apache.spark.sql.execution.streaming._
@@ -39,6 +39,7 @@ import org.apache.spark.util.{Clock, SystemClock, Utils}
  * @since 2.0.0
  */
 @Experimental
+@InterfaceStability.Evolving
 class StreamingQueryManager private[sql] (sparkSession: SparkSession) {
   private[sql] val stateStoreCoordinator =

sql/core/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryStatus.scala

@@ -22,7 +22,7 @@ import org.json4s.JsonAST.JValue
 import org.json4s.JsonDSL._
 import org.json4s.jackson.JsonMethods._
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
 /**
  * :: Experimental ::
@@ -36,6 +36,7 @@ import org.apache.spark.annotation.Experimental
  * @since 2.1.0
  */
 @Experimental
+@InterfaceStability.Evolving
 class StreamingQueryStatus protected[sql](
     val message: String,
     val isDataAvailable: Boolean,

sql/core/src/main/scala/org/apache/spark/sql/streaming/Trigger.scala

@@ -23,7 +23,7 @@ import scala.concurrent.duration.Duration
 import org.apache.commons.lang3.StringUtils
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
 import org.apache.spark.unsafe.types.CalendarInterval
 /**
@@ -33,6 +33,7 @@ import org.apache.spark.unsafe.types.CalendarInterval
  * @since 2.0.0
  */
 @Experimental
+@InterfaceStability.Evolving
 sealed trait Trigger
 /**
@@ -59,6 +60,7 @@ sealed trait Trigger
  * @since 2.0.0
  */
 @Experimental
+@InterfaceStability.Evolving
 case class ProcessingTime(intervalMs: Long) extends Trigger {
   require(intervalMs >= 0, "the interval of trigger should not be negative")
 }
@@ -70,6 +72,7 @@ case class ProcessingTime(intervalMs: Long) extends Trigger {
  * @since 2.0.0
  */
 @Experimental
+@InterfaceStability.Evolving
 object ProcessingTime {
   /**

sql/core/src/main/scala/org/apache/spark/sql/streaming/progress.scala

@@ -29,13 +29,14 @@ import org.json4s.JsonAST.JValue
 import org.json4s.JsonDSL._
 import org.json4s.jackson.JsonMethods._
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
 /**
  * :: Experimental ::
  * Information about updates made to stateful operators in a [[StreamingQuery]] during a trigger.
  */
 @Experimental
+@InterfaceStability.Evolving
 class StateOperatorProgress private[sql](
     val numRowsTotal: Long,
     val numRowsUpdated: Long) extends Serializable {
@@ -80,6 +81,7 @@ class StateOperatorProgress private[sql](
  * @since 2.1.0
  */
 @Experimental
+@InterfaceStability.Evolving
 class StreamingQueryProgress private[sql](
     val id: UUID,
     val runId: UUID,
@@ -151,6 +153,7 @@ class StreamingQueryProgress private[sql](
  * @since 2.1.0
  */
 @Experimental
+@InterfaceStability.Evolving
 class SourceProgress protected[sql](
     val description: String,
     val startOffset: String,
@@ -196,6 +199,7 @@ class SourceProgress protected[sql](
  * @since 2.1.0
  */
 @Experimental
+@InterfaceStability.Evolving
 class SinkProgress protected[sql](
     val description: String) extends Serializable {