[SPARK-35019][PYTHON][SQL] Fix type hints mismatches in pyspark.sql.*
### What changes were proposed in this pull request? Fix type hints mismatches in pyspark.sql.* ### Why are the changes needed? There were some type hint mismatches (wrong return types and parameter types) in pyspark.sql.* ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? dev/lint-python passed. Closes #32122 from Yikun/SPARK-35019. Authored-by: Yikun Jiang <yikunkero@gmail.com> Signed-off-by: HyukjinKwon <gurwls223@apache.org>
This commit is contained in:
parent
8627cab39d
commit
b43f7e6a97
|
@ -17,6 +17,7 @@
|
|||
# under the License.
|
||||
|
||||
from typing import Any, Callable, List, Optional
|
||||
from pyspark.sql._typing import UserDefinedFunctionLike
|
||||
from pyspark.sql.dataframe import DataFrame
|
||||
from pyspark.sql.session import SparkSession
|
||||
from pyspark.sql.types import DataType, StructType
|
||||
|
@ -53,7 +54,7 @@ class Catalog:
|
|||
def dropGlobalTempView(self, viewName: str) -> None: ...
|
||||
def registerFunction(
|
||||
self, name: str, f: Callable[..., Any], returnType: DataType = ...
|
||||
) -> None: ...
|
||||
) -> UserDefinedFunctionLike: ...
|
||||
def isCached(self, tableName: str) -> bool: ...
|
||||
def cacheTable(self, tableName: str) -> None: ...
|
||||
def uncacheTable(self, tableName: str) -> None: ...
|
||||
|
|
|
@ -21,7 +21,7 @@ from py4j.java_gateway import JavaObject # type: ignore[import]
|
|||
|
||||
class RuntimeConfig:
|
||||
def __init__(self, jconf: JavaObject) -> None: ...
|
||||
def set(self, key: str, value: str) -> str: ...
|
||||
def set(self, key: str, value: str) -> None: ...
|
||||
def get(self, key: str, default: Optional[str] = ...) -> str: ...
|
||||
def unset(self, key: str) -> None: ...
|
||||
def isModifiable(self, key: str) -> bool: ...
|
||||
|
|
|
@ -15,6 +15,7 @@
|
|||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from pyspark.sql._typing import UserDefinedFunctionLike
|
||||
|
||||
from typing import overload
|
||||
from typing import Any, Callable, Iterable, List, Optional, Tuple, TypeVar, Union
|
||||
|
@ -63,7 +64,7 @@ class SQLContext:
|
|||
) -> DataFrame: ...
|
||||
def registerFunction(
|
||||
self, name: str, f: Callable[..., Any], returnType: DataType = ...
|
||||
) -> None: ...
|
||||
) -> UserDefinedFunctionLike: ...
|
||||
def registerJavaFunction(
|
||||
self, name: str, javaClassName: str, returnType: Optional[DataType] = ...
|
||||
) -> None: ...
|
||||
|
|
|
@ -588,7 +588,7 @@ class DataFrame(PandasMapOpsMixin, PandasConversionMixin):
|
|||
|
||||
Parameters
|
||||
----------
|
||||
eventTime : str or :class:`Column`
|
||||
eventTime : str
|
||||
the name of the column that contains the event time of the row.
|
||||
delayThreshold : str
|
||||
the minimum delay to wait for data to arrive late, relative to the
|
||||
|
|
|
@ -85,7 +85,7 @@ class DataFrame(PandasMapOpsMixin, PandasConversionMixin):
|
|||
def checkpoint(self, eager: bool = ...) -> DataFrame: ...
|
||||
def localCheckpoint(self, eager: bool = ...) -> DataFrame: ...
|
||||
def withWatermark(
|
||||
self, eventTime: ColumnOrName, delayThreshold: str
|
||||
self, eventTime: str, delayThreshold: str
|
||||
) -> DataFrame: ...
|
||||
def hint(self, name: str, *parameters: Union[PrimitiveType, List[PrimitiveType]]) -> DataFrame: ...
|
||||
def count(self) -> int: ...
|
||||
|
|
|
@ -57,7 +57,7 @@ def monotonically_increasing_id() -> Column: ...
|
|||
def nanvl(col1: ColumnOrName, col2: ColumnOrName) -> Column: ...
|
||||
def percentile_approx(
|
||||
col: ColumnOrName,
|
||||
percentage: Union[Column, float, List[float]],
|
||||
percentage: Union[Column, float, List[float], Tuple[float]],
|
||||
accuracy: Union[Column, float] = ...,
|
||||
) -> Column: ...
|
||||
def rand(seed: Optional[int] = ...) -> Column: ...
|
||||
|
|
Loading…
Reference in a new issue