Rename top-level 'pyspark' directory to 'python'

Author: Josh Rosen, 2013-01-01 14:48:45 -08:00
Commit: b58340dbd9 (parent: 170e451fbd)
28 changed files with 13 additions and 13 deletions

core/src/main/scala/spark/api/python/PythonRDD.scala

@@ -38,7 +38,7 @@ private[spark] class PythonRDD[T: ClassManifest](
   override def compute(split: Split, context: TaskContext): Iterator[Array[Byte]] = {
     val SPARK_HOME = new ProcessBuilder().environment().get("SPARK_HOME")
-    val pb = new ProcessBuilder(Seq(pythonExec, SPARK_HOME + "/pyspark/pyspark/worker.py"))
+    val pb = new ProcessBuilder(Seq(pythonExec, SPARK_HOME + "/python/pyspark/worker.py"))
     // Add the environmental variables to the process.
     val currentEnvVars = pb.environment()
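For reference, a minimal Python sketch of the path the Scala code above now builds when it launches a worker. SPARK_HOME and the bare `python` executable name are assumptions here, and the real socket plumbing between the JVM and the worker is omitted:

    import os
    import subprocess

    # Resolve the worker script the same way the ProcessBuilder call above
    # does after the rename: repo root + python/pyspark/worker.py.
    spark_home = os.environ["SPARK_HOME"]
    worker = os.path.join(spark_home, "python", "pyspark", "worker.py")

    # Launch one worker process, inheriting the current environment.
    subprocess.Popen(["python", worker], env=os.environ.copy())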

docs/_plugins/copy_api_dirs.rb

@@ -30,8 +30,8 @@ if ENV['SKIP_SCALADOC'] != '1'
 end
 
 if ENV['SKIP_EPYDOC'] != '1'
-  puts "Moving to pyspark directory and building epydoc."
-  cd("../pyspark")
+  puts "Moving to python directory and building epydoc."
+  cd("../python")
 
   puts `epydoc --config epydoc.conf`
   puts "Moving back into docs dir."
@@ -40,8 +40,8 @@ if ENV['SKIP_EPYDOC'] != '1'
   puts "echo making directory pyspark"
   mkdir_p "pyspark"
 
-  puts "cp -r ../pyspark/docs/. api/pyspark"
-  cp_r("../pyspark/docs/.", "api/pyspark")
+  puts "cp -r ../python/docs/. api/pyspark"
+  cp_r("../python/docs/.", "api/pyspark")
 
   cd("..")
 end
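For illustration only, a rough Python equivalent of the doc-copy step the Ruby plugin performs after the rename. Paths are relative to the docs/ directory, as in the plugin, and dirs_exist_ok assumes Python 3.8+:

    import shutil

    # Copy the epydoc output from the renamed python/ directory into the
    # site's api/pyspark directory, merging into it if it already exists.
    shutil.copytree("../python/docs", "api/pyspark", dirs_exist_ok=True)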

pyspark-shell (new executable file)

@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+FWDIR="`dirname $0`"
+exec $FWDIR/run-pyspark $FWDIR/python/pyspark/shell.py "$@"
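A hypothetical Python rendering of the same three-line launcher, just to spell out what it does (the bash script above is the real one):

    import os
    import sys

    # Locate the directory containing this script (the repo root) and
    # exec run-pyspark on the interactive shell module, forwarding args.
    fwdir = os.path.dirname(os.path.abspath(__file__))
    os.execv(os.path.join(fwdir, "run-pyspark"),
             ["run-pyspark", os.path.join(fwdir, "python/pyspark/shell.py")] + sys.argv[1:])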

pyspark/pyspark-shell (deleted executable file)

@@ -1,3 +0,0 @@
-#!/usr/bin/env bash
-FWDIR="`dirname $0`"
-exec $FWDIR/run-pyspark $FWDIR/pyspark/shell.py "$@"

python/pyspark/__init__.py

@@ -10,7 +10,7 @@ Public classes:
 """
 import sys
 import os
-sys.path.insert(0, os.path.join(os.environ["SPARK_HOME"], "pyspark/lib/py4j0.7.egg"))
+sys.path.insert(0, os.path.join(os.environ["SPARK_HOME"], "python/lib/py4j0.7.egg"))
 
 from pyspark.context import SparkContext
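A small sketch of what this sys.path tweak buys: with the egg's post-rename location prepended, Py4J resolves from the bundled egg without a separate install. SPARK_HOME is assumed to be set:

    import os
    import sys

    # Prepend the bundled Py4J egg from its new location under python/lib/.
    egg = os.path.join(os.environ["SPARK_HOME"], "python/lib/py4j0.7.egg")
    sys.path.insert(0, egg)

    import py4j  # now resolves from the egg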

run

@@ -63,7 +63,7 @@ CORE_DIR="$FWDIR/core"
 REPL_DIR="$FWDIR/repl"
 EXAMPLES_DIR="$FWDIR/examples"
 BAGEL_DIR="$FWDIR/bagel"
-PYSPARK_DIR="$FWDIR/pyspark"
+PYSPARK_DIR="$FWDIR/python"
 
 # Build up classpath
 CLASSPATH="$SPARK_CLASSPATH"

run-pyspark

@@ -1,7 +1,7 @@
 #!/usr/bin/env bash
 
 # Figure out where the Scala framework is installed
-FWDIR="$(cd `dirname $0`; cd ../; pwd)"
+FWDIR="$(cd `dirname $0`; pwd)"
 
 # Export this as SPARK_HOME
 export SPARK_HOME="$FWDIR"
@@ -18,7 +18,7 @@ fi
 export PYSPARK_PYTHON
 
 # Add the PySpark classes to the Python path:
-export PYTHONPATH=$SPARK_HOME/pyspark/:$PYTHONPATH
+export PYTHONPATH=$SPARK_HOME/python/:$PYTHONPATH
 
 # Launch with `scala` by default:
 if [[ "$SPARK_LAUNCH_WITH_SCALA" != "0" ]] ; then

run2.cmd

@@ -34,7 +34,7 @@ set CORE_DIR=%FWDIR%core
 set REPL_DIR=%FWDIR%repl
 set EXAMPLES_DIR=%FWDIR%examples
 set BAGEL_DIR=%FWDIR%bagel
-set PYSPARK_DIR=%FWDIR%pyspark
+set PYSPARK_DIR=%FWDIR%python
 
 rem Build up classpath
 set CLASSPATH=%SPARK_CLASSPATH%;%MESOS_CLASSPATH%;%FWDIR%conf;%CORE_DIR%\target\scala-%SCALA_VERSION%\classes