spark-instrumented-optimizer/python/pyspark/ml/param/shared.pyi
zero323 31a16fbb40 [SPARK-32714][PYTHON] Initial pyspark-stubs port
### What changes were proposed in this pull request?

This PR proposes migrating [`pyspark-stubs`](https://github.com/zero323/pyspark-stubs) into the Spark codebase.

### Why are the changes needed?

Bundling the annotations with Spark lets users type-check PySpark code out of the box and keeps the stubs in sync with the sources, rather than maintaining them separately in the third-party `pyspark-stubs` package.
### Does this PR introduce _any_ user-facing change?

Yes. This PR adds type annotations directly to the Spark source.

This may affect how development tools behave for users who haven't previously used `pyspark-stubs`.
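
For instance, with the stubs in place a checker such as MyPy can flag mis-typed parameters in user code. A hypothetical snippet (the exact error wording depends on the MyPy version):

```
from pyspark.ml.classification import LogisticRegression

# maxIter is annotated as int in the stubs, so a str argument is rejected,
# roughly: Argument "maxIter" ... has incompatible type "str"; expected "int"
lr = LogisticRegression(maxIter="10")
```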

### How was this patch tested?

- [x] MyPy tests of the PySpark source
    ```
    mypy --no-incremental --config python/mypy.ini python/pyspark
    ```
- [x] MyPy tests of Spark examples
    ```
    MYPYPATH=python/ mypy --no-incremental --config python/mypy.ini examples/src/main/python/ml examples/src/main/python/sql examples/src/main/python/sql/streaming
    ```
- [x] Existing Flake8 linter

- [x] Existing unit tests

Tested against:

- `mypy==0.790+dev.e959952d9001e9713d329a2f9b196705b028f894`
- `mypy==0.782`

Closes #29591 from zero323/SPARK-32681.

Authored-by: zero323 <mszymkiewicz@gmail.com>
Signed-off-by: HyukjinKwon <gurwls223@apache.org>
2020-09-24 14:15:36 +09:00


#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Any, Generic, List
from pyspark.ml._typing import T
from pyspark.ml.param import *
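# Mixin stubs for shared algorithm parameters: each class declares the shared
# Param descriptor and the typed getter that concrete ML stages inherit.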
class HasMaxIter(Params):
    maxIter: Param[int]
    def __init__(self) -> None: ...
    def getMaxIter(self) -> int: ...

class HasRegParam(Params):
    regParam: Param[float]
    def __init__(self) -> None: ...
    def getRegParam(self) -> float: ...

class HasFeaturesCol(Params):
    featuresCol: Param[str]
    def __init__(self) -> None: ...
    def getFeaturesCol(self) -> str: ...

class HasLabelCol(Params):
    labelCol: Param[str]
    def __init__(self) -> None: ...
    def getLabelCol(self) -> str: ...

class HasPredictionCol(Params):
    predictionCol: Param[str]
    def __init__(self) -> None: ...
    def getPredictionCol(self) -> str: ...

class HasProbabilityCol(Params):
    probabilityCol: Param[str]
    def __init__(self) -> None: ...
    def getProbabilityCol(self) -> str: ...

class HasRawPredictionCol(Params):
    rawPredictionCol: Param[str]
    def __init__(self) -> None: ...
    def getRawPredictionCol(self) -> str: ...

class HasInputCol(Params):
    inputCol: Param[str]
    def __init__(self) -> None: ...
    def getInputCol(self) -> str: ...

class HasInputCols(Params):
    inputCols: Param[List[str]]
    def __init__(self) -> None: ...
    def getInputCols(self) -> List[str]: ...

class HasOutputCol(Params):
    outputCol: Param[str]
    def __init__(self) -> None: ...
    def getOutputCol(self) -> str: ...

class HasOutputCols(Params):
    outputCols: Param[List[str]]
    def __init__(self) -> None: ...
    def getOutputCols(self) -> List[str]: ...

class HasNumFeatures(Params):
    numFeatures: Param[int]
    def __init__(self) -> None: ...
    def getNumFeatures(self) -> int: ...

class HasCheckpointInterval(Params):
    checkpointInterval: Param[int]
    def __init__(self) -> None: ...
    def getCheckpointInterval(self) -> int: ...

class HasSeed(Params):
    seed: Param[int]
    def __init__(self) -> None: ...
    def getSeed(self) -> int: ...

class HasTol(Params):
    tol: Param[float]
    def __init__(self) -> None: ...
    def getTol(self) -> float: ...

class HasRelativeError(Params):
    relativeError: Param[float]
    def __init__(self) -> None: ...
    def getRelativeError(self) -> float: ...

class HasStepSize(Params):
    stepSize: Param[float]
    def __init__(self) -> None: ...
    def getStepSize(self) -> float: ...

class HasHandleInvalid(Params):
    handleInvalid: Param[str]
    def __init__(self) -> None: ...
    def getHandleInvalid(self) -> str: ...

class HasElasticNetParam(Params):
    elasticNetParam: Param[float]
    def __init__(self) -> None: ...
    def getElasticNetParam(self) -> float: ...

class HasFitIntercept(Params):
    fitIntercept: Param[bool]
    def __init__(self) -> None: ...
    def getFitIntercept(self) -> bool: ...

class HasStandardization(Params):
    standardization: Param[bool]
    def __init__(self) -> None: ...
    def getStandardization(self) -> bool: ...

class HasThresholds(Params):
    thresholds: Param[List[float]]
    def __init__(self) -> None: ...
    def getThresholds(self) -> List[float]: ...

class HasThreshold(Params):
    threshold: Param[float]
    def __init__(self) -> None: ...
    def getThreshold(self) -> float: ...

class HasWeightCol(Params):
    weightCol: Param[str]
    def __init__(self) -> None: ...
    def getWeightCol(self) -> str: ...

class HasSolver(Params):
    solver: Param[str]
    def __init__(self) -> None: ...
    def getSolver(self) -> str: ...

class HasVarianceCol(Params):
    varianceCol: Param[str]
    def __init__(self) -> None: ...
    def getVarianceCol(self) -> str: ...

class HasAggregationDepth(Params):
    aggregationDepth: Param[int]
    def __init__(self) -> None: ...
    def getAggregationDepth(self) -> int: ...

class HasParallelism(Params):
    parallelism: Param[int]
    def __init__(self) -> None: ...
    def getParallelism(self) -> int: ...

class HasCollectSubModels(Params):
    collectSubModels: Param[bool]
    def __init__(self) -> None: ...
    def getCollectSubModels(self) -> bool: ...

class HasLoss(Params):
    loss: Param[str]
    def __init__(self) -> None: ...
    def getLoss(self) -> str: ...

class HasValidationIndicatorCol(Params):
    validationIndicatorCol: Param[str]
    def __init__(self) -> None: ...
    def getValidationIndicatorCol(self) -> str: ...

class HasDistanceMeasure(Params):
    distanceMeasure: Param[str]
    def __init__(self) -> None: ...
    def getDistanceMeasure(self) -> str: ...

class HasBlockSize(Params):
    blockSize: Param[int]
    def __init__(self) -> None: ...
    def getBlockSize(self) -> int: ...
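
A minimal sketch of how these mixins surface in user code, assuming an active SparkSession (`Binarizer` mixes in `HasThreshold`, `HasInputCol`, and `HasOutputCol`, so MyPy can infer precise getter return types):

```
from pyspark.ml.feature import Binarizer

# Requires an active SparkSession at runtime; MyPy checks this statically.
binarizer = Binarizer(threshold=0.5, inputCol="feature", outputCol="binarized")

threshold: float = binarizer.getThreshold()  # HasThreshold.getThreshold -> float
input_col: str = binarizer.getInputCol()     # HasInputCol.getInputCol -> str
```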