spark-instrumented-optimizer/python/mypy.ini
garawalid 176218b6b8 [SPARK-35292][PYTHON] Delete redundant parameter in mypy configuration
### What changes were proposed in this pull request?

The parameter **no_implicit_optional** is defined twice in the mypy configuration, [line 20](https://github.com/apache/spark/blob/master/python/mypy.ini#L20) and line 105.

### Why are the changes needed?

We would like to keep the mypy configuration clean.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

This patch can be tested with `dev/lint-python`

Closes #32418 from garawalid/feature/clean-mypy-config.

Authored-by: garawalid <gwalid94@gmail.com>
Signed-off-by: HyukjinKwon <gurwls223@apache.org>
2021-05-04 09:01:34 +09:00

131 lines
2.9 KiB
INI

;
; Licensed to the Apache Software Foundation (ASF) under one or more
; contributor license agreements. See the NOTICE file distributed with
; this work for additional information regarding copyright ownership.
; The ASF licenses this file to You under the Apache License, Version 2.0
; (the "License"); you may not use this file except in compliance with
; the License. You may obtain a copy of the License at
;
; http://www.apache.org/licenses/LICENSE-2.0
;
; Unless required by applicable law or agreed to in writing, software
; distributed under the License is distributed on an "AS IS" BASIS,
; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
; See the License for the specific language governing permissions and
; limitations under the License.
;
; Global defaults for every module; per-module overrides follow below.
[mypy]
; Treat None as incompatible with non-Optional types.
strict_optional = True
; A default value of None does not implicitly make a parameter Optional.
no_implicit_optional = True
; Require functions to be fully type-annotated (relaxed per module below).
disallow_untyped_defs = True
; Allow untyped def in internal modules and tests
; (relaxes the global disallow_untyped_defs = True for these modules only)
[mypy-pyspark.daemon]
disallow_untyped_defs = False
[mypy-pyspark.find_spark_home]
disallow_untyped_defs = False
[mypy-pyspark._globals]
disallow_untyped_defs = False
[mypy-pyspark.install]
disallow_untyped_defs = False
[mypy-pyspark.java_gateway]
disallow_untyped_defs = False
[mypy-pyspark.join]
disallow_untyped_defs = False
[mypy-pyspark.ml.tests.*]
disallow_untyped_defs = False
[mypy-pyspark.mllib.tests.*]
disallow_untyped_defs = False
[mypy-pyspark.rddsampler]
disallow_untyped_defs = False
[mypy-pyspark.resource.tests.*]
disallow_untyped_defs = False
[mypy-pyspark.serializers]
disallow_untyped_defs = False
[mypy-pyspark.shuffle]
disallow_untyped_defs = False
[mypy-pyspark.streaming.tests.*]
disallow_untyped_defs = False
[mypy-pyspark.streaming.util]
disallow_untyped_defs = False
[mypy-pyspark.sql.tests.*]
disallow_untyped_defs = False
[mypy-pyspark.sql.pandas.serializers]
disallow_untyped_defs = False
[mypy-pyspark.sql.pandas.types]
disallow_untyped_defs = False
[mypy-pyspark.sql.pandas.typehints]
disallow_untyped_defs = False
[mypy-pyspark.sql.pandas.utils]
disallow_untyped_defs = False
[mypy-pyspark.sql.pandas._typing.protocols.*]
disallow_untyped_defs = False
[mypy-pyspark.sql.utils]
disallow_untyped_defs = False
[mypy-pyspark.tests.*]
disallow_untyped_defs = False
[mypy-pyspark.testing.*]
disallow_untyped_defs = False
[mypy-pyspark.traceback_utils]
disallow_untyped_defs = False
[mypy-pyspark.util]
disallow_untyped_defs = False
[mypy-pyspark.worker]
disallow_untyped_defs = False
; Ignore errors in embedded third party code
; (cloudpickle is vendored as-is and is not held to this project's typing rules)
[mypy-pyspark.cloudpickle.*]
ignore_errors = True
; Ignore missing imports for external untyped packages
; (third-party dependencies that do not provide type stubs)
[mypy-py4j.*]
ignore_missing_imports = True
[mypy-numpy]
ignore_missing_imports = True
[mypy-scipy.*]
ignore_missing_imports = True
[mypy-pandas.*]
ignore_missing_imports = True
[mypy-pyarrow.*]
ignore_missing_imports = True
[mypy-psutil.*]
ignore_missing_imports = True
; TODO(SPARK-34941): Enable mypy for pandas-on-Spark
[mypy-pyspark.pandas.*]
ignore_errors = True