Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[SPARK-50770][SS] Removing package scope for transformWithState operator APIs #49417

Closed
wants to merge 5 commits into the base branch from the source branch
Closed
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ import org.apache.spark.annotation.{Evolving, Experimental}
*/
@Experimental
@Evolving
ericm-db marked this conversation as resolved.
Show resolved Hide resolved
private[sql] trait ExpiredTimerInfo extends Serializable {
trait ExpiredTimerInfo extends Serializable {

/**
* Get the expired timer's expiry time as milliseconds in epoch time.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ import org.apache.spark.annotation.{Evolving, Experimental}
/**
* Interface used for arbitrary stateful operations with the v2 API to capture list value state.
*/
private[sql] trait ListState[S] extends Serializable {
trait ListState[S] extends Serializable {

/** Whether state exists or not. */
def exists(): Boolean
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ import org.apache.spark.annotation.{Evolving, Experimental}
*/
@Experimental
@Evolving
private[sql] trait QueryInfo extends Serializable {
trait QueryInfo extends Serializable {

/** Returns the streaming query id associated with stateful operator */
def getQueryId: UUID
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ import org.apache.spark.sql.errors.ExecutionErrors
*/
@Experimental
@Evolving
private[sql] abstract class StatefulProcessor[K, I, O] extends Serializable {
abstract class StatefulProcessor[K, I, O] extends Serializable {

// scalastyle:off
// Disable style checker so "implicits" object can start with lowercase i
Expand Down Expand Up @@ -125,7 +125,7 @@ private[sql] abstract class StatefulProcessor[K, I, O] extends Serializable {
*/
@Experimental
@Evolving
private[sql] abstract class StatefulProcessorWithInitialState[K, I, O, S]
abstract class StatefulProcessorWithInitialState[K, I, O, S]
extends StatefulProcessor[K, I, O] {

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ import org.apache.spark.sql.Encoder
*/
@Experimental
@Evolving
private[sql] trait StatefulProcessorHandle extends Serializable {
trait StatefulProcessorHandle extends Serializable {

/**
* Function to create new or return existing single value state variable of given type with ttl.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ import org.apache.spark.annotation.{Evolving, Experimental}
*/
@Experimental
@Evolving
private[sql] trait TimerValues extends Serializable {
trait TimerValues extends Serializable {

/**
* Get the current processing time as milliseconds in epoch time.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ import org.apache.spark.annotation.{Evolving, Experimental}
/**
* Interface used for arbitrary stateful operations with the v2 API to capture single value state.
*/
private[sql] trait ValueState[S] extends Serializable {
trait ValueState[S] extends Serializable {

/** Whether state exists or not. */
def exists(): Boolean
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -201,7 +201,7 @@ class KeyValueGroupedDataset[K, V] private[sql](
}

/** @inheritdoc */
private[sql] def transformWithState[U: Encoder](
def transformWithState[U: Encoder](
statefulProcessor: StatefulProcessor[K, V, U],
timeMode: TimeMode,
outputMode: OutputMode): Dataset[U] = {
Expand All @@ -219,7 +219,7 @@ class KeyValueGroupedDataset[K, V] private[sql](
}

/** @inheritdoc */
private[sql] def transformWithState[U: Encoder](
def transformWithState[U: Encoder](
statefulProcessor: StatefulProcessor[K, V, U],
eventTimeColumnName: String,
outputMode: OutputMode): Dataset[U] = {
Expand All @@ -235,7 +235,7 @@ class KeyValueGroupedDataset[K, V] private[sql](
}

/** @inheritdoc */
private[sql] def transformWithState[U: Encoder, S: Encoder](
def transformWithState[U: Encoder, S: Encoder](
statefulProcessor: StatefulProcessorWithInitialState[K, V, U, S],
timeMode: TimeMode,
outputMode: OutputMode,
Expand All @@ -257,7 +257,7 @@ class KeyValueGroupedDataset[K, V] private[sql](
}

/** @inheritdoc */
private[sql] def transformWithState[U: Encoder, S: Encoder](
def transformWithState[U: Encoder, S: Encoder](
statefulProcessor: StatefulProcessorWithInitialState[K, V, U, S],
eventTimeColumnName: String,
outputMode: OutputMode,
Expand Down
Loading