#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# define test binaries + versions
|
|
|
|
FLAKE8_BUILD="flake8"
|
2018-11-07 23:26:21 -05:00
|
|
|
MINIMUM_FLAKE8="3.5.0"
|
2020-10-19 15:50:01 -04:00
|
|
|
MYPY_BUILD="mypy"
|
2018-11-20 15:38:40 -05:00
|
|
|
PYCODESTYLE_BUILD="pycodestyle"
|
2020-07-26 21:43:32 -04:00
|
|
|
MINIMUM_PYCODESTYLE="2.6.0"
|
2014-08-06 15:58:24 -04:00
|
|
|
|
2018-11-20 15:38:40 -05:00
|
|
|
SPHINX_BUILD="sphinx-build"
|
2014-08-06 15:58:24 -04:00
|
|
|
|
2020-02-05 20:01:33 -05:00
|
|
|
PYTHON_EXECUTABLE="python3"
|
|
|
|
|
2020-03-15 00:09:35 -04:00
|
|
|
# Check whether an installed tool version meets a minimum requirement.
# Globals:   PYTHON_EXECUTABLE (read)
# Arguments: $1 - version string reported by the installed tool
#            $2 - minimum acceptable version string
# Outputs:   "True" or "False" on stdout
function satisfies_min_version {
    local provided_version="$1"
    local expected_version="$2"
    # Delegate the comparison to Python's packaging machinery so that
    # multi-digit and pre-release components compare correctly.
    # NOTE: the previous `echo "$(...)"` wrapper (ShellCheck SC2005) was
    # removed; it added a useless subshell and masked the interpreter's
    # exit status.
    "$PYTHON_EXECUTABLE" << EOM
from setuptools.extern.packaging import version
print(version.parse('$provided_version') >= version.parse('$expected_version'))
EOM
}
# Byte-compile every supplied Python file to catch syntax errors early.
# Globals:   PYTHON_EXECUTABLE (read)
# Arguments: $1 - newline-separated list of .py paths (word-split on purpose)
# Outputs:   progress messages on stdout; exits non-zero on failure
function compile_python_test {
    local COMPILE_STATUS=
    local COMPILE_REPORT=

    if [[ -z "$1" ]]; then
        echo "No python files found! Something is very wrong -- exiting."
        exit 1
    fi

    # compileall: https://docs.python.org/3/library/compileall.html
    echo "starting python compilation test..."
    # shellcheck disable=SC2086 -- $1 must word-split into individual paths.
    COMPILE_REPORT=$( ("$PYTHON_EXECUTABLE" -B -mcompileall -q -l -x "[/\\\\][.]git" $1) 2>&1)
    COMPILE_STATUS=$?

    if [ "$COMPILE_STATUS" -eq 0 ]; then
        echo "python compilation succeeded."
        echo
    else
        echo "Python compilation failed with the following errors:"
        echo "$COMPILE_REPORT"
        echo "$COMPILE_STATUS"
        exit "$COMPILE_STATUS"
    fi
}
# Run PEP 8 style checks with pycodestyle, preferring a suitable local
# install and falling back to downloading a pinned copy of the script.
# Globals:   SPARK_ROOT_DIR, PYCODESTYLE_BUILD, MINIMUM_PYCODESTYLE,
#            PYTHON_EXECUTABLE (all read)
# Arguments: $1 - newline-separated list of .py paths (word-split on purpose)
# Outputs:   progress messages on stdout; exits non-zero on failure
function pycodestyle_test {
    local PYCODESTYLE_STATUS=
    local PYCODESTYLE_REPORT=
    local RUN_LOCAL_PYCODESTYLE=
    local PYCODESTYLE_VERSION=
    local EXPECTED_PYCODESTYLE=
    local PYCODESTYLE_SCRIPT_PATH="$SPARK_ROOT_DIR/dev/pycodestyle-$MINIMUM_PYCODESTYLE.py"
    local PYCODESTYLE_SCRIPT_REMOTE_PATH="https://raw.githubusercontent.com/PyCQA/pycodestyle/$MINIMUM_PYCODESTYLE/pycodestyle.py"

    if [[ ! "$1" ]]; then
        echo "No python files found! Something is very wrong -- exiting."
        exit 1
    fi

    # check for locally installed pycodestyle & version
    RUN_LOCAL_PYCODESTYLE="False"
    if hash "$PYCODESTYLE_BUILD" 2> /dev/null; then
        PYCODESTYLE_VERSION="$($PYCODESTYLE_BUILD --version)"
        # Fix: arguments quoted -- the unquoted version strings word-split
        # if they ever contain whitespace (e.g. "2.6.0 (pycodestyle: ...)").
        EXPECTED_PYCODESTYLE="$(satisfies_min_version "$PYCODESTYLE_VERSION" "$MINIMUM_PYCODESTYLE")"
        if [ "$EXPECTED_PYCODESTYLE" == "True" ]; then
            RUN_LOCAL_PYCODESTYLE="True"
        fi
    fi

    # download the right version or run locally
    # Fix: was `[ $RUN_LOCAL_PYCODESTYLE == "False" ]` -- unquoted variable
    # plus non-POSIX `==` inside `[`; use bash `[[ ]]` with quoting instead.
    if [[ "$RUN_LOCAL_PYCODESTYLE" == "False" ]]; then
        # Get pycodestyle at runtime so that we don't rely on it being installed on the build server.
        # See: https://github.com/apache/spark/pull/1744#issuecomment-50982162
        # Updated to the latest official version of pep8. pep8 is formally renamed to pycodestyle.
        echo "downloading pycodestyle from $PYCODESTYLE_SCRIPT_REMOTE_PATH..."
        if [ ! -e "$PYCODESTYLE_SCRIPT_PATH" ]; then
            curl --silent -o "$PYCODESTYLE_SCRIPT_PATH" "$PYCODESTYLE_SCRIPT_REMOTE_PATH"
            local curl_status="$?"

            if [ "$curl_status" -ne 0 ]; then
                echo "Failed to download pycodestyle.py from $PYCODESTYLE_SCRIPT_REMOTE_PATH"
                exit "$curl_status"
            fi
        fi

        echo "starting pycodestyle test..."
        # shellcheck disable=SC2086 -- $1 must word-split into individual paths.
        PYCODESTYLE_REPORT=$( ("$PYTHON_EXECUTABLE" "$PYCODESTYLE_SCRIPT_PATH" --config=dev/tox.ini $1) 2>&1)
        PYCODESTYLE_STATUS=$?
    else
        # we have the right version installed, so run locally
        echo "starting pycodestyle test..."
        # shellcheck disable=SC2086 -- $1 must word-split into individual paths.
        PYCODESTYLE_REPORT=$( ($PYCODESTYLE_BUILD --config=dev/tox.ini $1) 2>&1)
        PYCODESTYLE_STATUS=$?
    fi

    if [ "$PYCODESTYLE_STATUS" -ne 0 ]; then
        echo "pycodestyle checks failed:"
        echo "$PYCODESTYLE_REPORT"
        exit "$PYCODESTYLE_STATUS"
    else
        echo "pycodestyle checks passed."
        echo
    fi
}
# Run mypy static type checks over python/pyspark.
# Globals:   MYPY_BUILD (read)
# Outputs:   progress messages on stdout; exits non-zero on check failure
# Returns:   0 (early) when mypy is not installed -- the check is optional
function mypy_test {
    local MYPY_REPORT=
    local MYPY_STATUS=

    # mypy is not guaranteed to exist on every build machine, so a
    # missing binary only skips the check rather than failing the run.
    if ! hash "$MYPY_BUILD" 2> /dev/null; then
        echo "The $MYPY_BUILD command was not found. Skipping for now."
        return
    fi

    echo "starting $MYPY_BUILD test..."
    MYPY_REPORT=$( ($MYPY_BUILD --config-file python/mypy.ini python/pyspark) 2>&1)
    MYPY_STATUS=$?

    if [ "$MYPY_STATUS" -eq 0 ]; then
        echo "mypy checks passed."
        echo
    else
        echo "mypy checks failed:"
        echo "$MYPY_REPORT"
        echo "$MYPY_STATUS"
        exit "$MYPY_STATUS"
    fi
}
# Run flake8 syntax / undefined-name checks over the whole tree.
# Globals:   FLAKE8_BUILD, MINIMUM_FLAKE8 (read)
# Outputs:   progress messages on stdout; exits non-zero on failure
function flake8_test {
    local FLAKE8_VERSION=
    local EXPECTED_FLAKE8=
    local FLAKE8_REPORT=
    local FLAKE8_STATUS=
    # Fix: previously undeclared, so this helper array leaked into the
    # global scope.
    local _FLAKE8_VERSION=

    if ! hash "$FLAKE8_BUILD" 2> /dev/null; then
        echo "The flake8 command was not found."
        echo "flake8 checks failed."
        exit 1
    fi

    # "flake8 --version" prints "X.Y.Z (plugin list) ..."; word-splitting
    # into an array and taking element 0 keeps only the version number.
    _FLAKE8_VERSION=($($FLAKE8_BUILD --version))
    FLAKE8_VERSION="${_FLAKE8_VERSION[0]}"
    # Fix: arguments quoted to prevent accidental word-splitting.
    EXPECTED_FLAKE8="$(satisfies_min_version "$FLAKE8_VERSION" "$MINIMUM_FLAKE8")"

    if [[ "$EXPECTED_FLAKE8" == "False" ]]; then
        echo "\
The minimum flake8 version needs to be $MINIMUM_FLAKE8. Your current version is $FLAKE8_VERSION

flake8 checks failed."
        exit 1
    fi

    echo "starting $FLAKE8_BUILD test..."
    FLAKE8_REPORT=$( ($FLAKE8_BUILD --append-config dev/tox.ini --count --show-source --statistics .) 2>&1)
    FLAKE8_STATUS=$?

    if [ "$FLAKE8_STATUS" -ne 0 ]; then
        echo "flake8 checks failed:"
        echo "$FLAKE8_REPORT"
        echo "$FLAKE8_STATUS"
        exit "$FLAKE8_STATUS"
    else
        echo "flake8 checks passed."
        echo
    fi
}
# Build the PySpark Sphinx documentation with warnings treated as errors.
# Skips (returns 0) when sphinx-build or any of its Python prerequisites
# (sphinx < 3.1, pydata_sphinx_theme, nbsphinx, IPython) is unavailable.
# Globals:   SPHINX_BUILD, PYTHON_EXECUTABLE (read)
# Outputs:   progress messages on stdout; exits non-zero on build failure
function sphinx_test {
    local SPHINX_REPORT=
    local SPHINX_STATUS=
    # Fix: the PYTHON_HAS_* helpers were previously undeclared and leaked
    # into the global scope.
    local PYTHON_HAS_SPHINX=
    local PYTHON_HAS_SPHINX_3_0=
    local PYTHON_HAS_THEME=
    local PYTHON_HAS_NBSPHINX=
    local PYTHON_HAS_IPYTHON=

    # Check that the documentation builds acceptably, skip check if sphinx is not installed.
    if ! hash "$SPHINX_BUILD" 2> /dev/null; then
        echo "The $SPHINX_BUILD command was not found. Skipping Sphinx build for now."
        echo
        return
    fi

    PYTHON_HAS_SPHINX=$("$PYTHON_EXECUTABLE" -c 'import importlib.util; print(importlib.util.find_spec("sphinx") is not None)')
    if [[ "$PYTHON_HAS_SPHINX" == "False" ]]; then
        echo "$PYTHON_EXECUTABLE does not have Sphinx installed. Skipping Sphinx build for now."
        echo
        return
    fi

    # TODO(SPARK-32407): Sphinx 3.1+ does not correctly index nested classes.
    # See also https://github.com/sphinx-doc/sphinx/issues/7551.
    PYTHON_HAS_SPHINX_3_0=$("$PYTHON_EXECUTABLE" -c 'from distutils.version import LooseVersion; import sphinx; print(LooseVersion(sphinx.__version__) < LooseVersion("3.1.0"))')
    if [[ "$PYTHON_HAS_SPHINX_3_0" == "False" ]]; then
        # Fix: message typo "lower then" -> "lower than".
        echo "$PYTHON_EXECUTABLE has Sphinx 3.1+ installed but it requires lower than 3.1. Skipping Sphinx build for now."
        echo
        return
    fi

    # TODO(SPARK-32391): Install pydata_sphinx_theme in Jenkins machines
    PYTHON_HAS_THEME=$("$PYTHON_EXECUTABLE" -c 'import importlib.util; print(importlib.util.find_spec("pydata_sphinx_theme") is not None)')
    if [[ "$PYTHON_HAS_THEME" == "False" ]]; then
        echo "$PYTHON_EXECUTABLE does not have pydata_sphinx_theme installed. Skipping Sphinx build for now."
        echo
        return
    fi

    # TODO(SPARK-32666): Install nbsphinx in Jenkins machines
    PYTHON_HAS_NBSPHINX=$("$PYTHON_EXECUTABLE" -c 'import importlib.util; print(importlib.util.find_spec("nbsphinx") is not None)')
    if [[ "$PYTHON_HAS_NBSPHINX" == "False" ]]; then
        echo "$PYTHON_EXECUTABLE does not have nbsphinx installed. Skipping Sphinx build for now."
        echo
        return
    fi

    # TODO(SPARK-32666): Install ipython in Jenkins machines
    PYTHON_HAS_IPYTHON=$("$PYTHON_EXECUTABLE" -c 'import importlib.util; print(importlib.util.find_spec("IPython") is not None)')
    if [[ "$PYTHON_HAS_IPYTHON" == "False" ]]; then
        echo "$PYTHON_EXECUTABLE does not have ipython installed. Skipping Sphinx build for now."
        echo
        return
    fi

    echo "starting $SPHINX_BUILD tests..."
    pushd python/docs &> /dev/null
    make clean &> /dev/null
    # Treat warnings as errors so we stop correctly
    SPHINX_REPORT=$( (SPHINXOPTS="-a -W" make html) 2>&1)
    SPHINX_STATUS=$?

    if [ "$SPHINX_STATUS" -ne 0 ]; then
        echo "$SPHINX_BUILD checks failed:"
        echo "$SPHINX_REPORT"
        echo
        echo "re-running make html to print full warning list:"
        make clean &> /dev/null
        SPHINX_REPORT=$( (SPHINXOPTS="-a" make html) 2>&1)
        echo "$SPHINX_REPORT"
        exit "$SPHINX_STATUS"
    else
        echo "$SPHINX_BUILD checks passed."
        echo
    fi

    popd &> /dev/null
}
# Drive all lint checks from the repository root so relative paths
# (dev/tox.ini, python/docs) resolve correctly.
# Globals: SPARK_ROOT_DIR (read)
function main {
    local python_source

    pushd "$SPARK_ROOT_DIR" &> /dev/null

    # Newline-separated list of every .py file in the repo; the per-check
    # functions word-split this list deliberately.
    python_source="$(find . -name "*.py")"

    compile_python_test "$python_source"
    pycodestyle_test "$python_source"
    flake8_test
    mypy_test
    sphinx_test

    echo
    echo "all lint-python tests passed!"

    popd &> /dev/null
}

SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
SPARK_ROOT_DIR="$(dirname "${SCRIPT_DIR}")"

main