- Removed config files from git's control.

- Made DfsShuffle the default shuffle in RDD.scala.
Mosharaf Chowdhury 2010-12-07 10:17:47 -08:00
parent 1013a01469
commit 06dc4a5148
4 changed files with 1 addition and 23 deletions


@@ -1 +0,0 @@
--Dspark.broadcast.MasterHostAddress=127.0.0.1 -Dspark.broadcast.MasterTrackerPort=11111 -Dspark.broadcast.BlockSize=256 -Dspark.broadcast.MaxRetryCount=2 -Dspark.broadcast.TrackerSocketTimeout=50000 -Dspark.broadcast.ServerSocketTimeout=10000 -Dspark.broadcast.MaxChatTime=500 -Dspark.broadcast.EndGameFraction=0.95 -Dspark.broadcast.Factory=spark.ChainedBroadcastFactory
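The removed file is a single line of JVM system properties for the broadcast subsystem. For context, a hedged sketch of how such -Dspark.broadcast.* flags would typically be read on the JVM side; the object name and fallback defaults below are illustrative assumptions, not Spark's actual broadcast code:

    // Illustrative sketch only: reading the kind of -D system properties the
    // deleted java-opts line passed to the JVM. Property names come from the
    // line above; the defaults and the object name are assumptions.
    object BroadcastSettings {
      val masterHost   = System.getProperty("spark.broadcast.MasterHostAddress", "127.0.0.1")
      val trackerPort  = System.getProperty("spark.broadcast.MasterTrackerPort", "11111").toInt
      val blockSize    = System.getProperty("spark.broadcast.BlockSize", "256").toInt
      val maxRetries   = System.getProperty("spark.broadcast.MaxRetryCount", "2").toInt
      val factoryClass = System.getProperty("spark.broadcast.Factory", "spark.ChainedBroadcastFactory")
    }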


@@ -1,8 +0,0 @@
-# Set everything to be logged to the console
-log4j.rootCategory=INFO, console
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss.SSS} %p %c{1}: %m%n
-
-# Ignore messages below warning level from Jetty, because it's a bit verbose
-log4j.logger.org.eclipse.jetty=WARN
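This is a standard log4j 1.x console configuration. A hedged usage sketch, assuming a file with these properties sits on the classpath as log4j.properties; the demo object below is made up for illustration:

    import org.apache.log4j.Logger

    // Hedged sketch: with the deleted properties on the classpath, any class can
    // grab a logger and write through the console appender configured above.
    object LoggingDemo {
      private val log = Logger.getLogger(getClass)

      def main(args: Array[String]) {
        log.info("shows up: rootCategory is INFO")        // formatted by the ConversionPattern
        log.debug("suppressed: DEBUG is below the INFO threshold")
        Logger.getLogger("org.eclipse.jetty").info("also suppressed: Jetty is capped at WARN")
      }
    }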


@@ -1,13 +0,0 @@
-#!/usr/bin/env bash
-
-# Set Spark environment variables for your site in this file. Some useful
-# variables to set are:
-# - MESOS_HOME, to point to your Mesos installation
-# - SCALA_HOME, to point to your Scala installation
-# - SPARK_CLASSPATH, to add elements to Spark's classpath
-# - SPARK_JAVA_OPTS, to add JVM options
-# - SPARK_MEM, to change the amount of memory used per node (this should
-#   be in the same format as the JVM's -Xmx option, e.g. 300m or 1g).
-# - SPARK_LIBRARY_PATH, to add extra search paths for native libraries.
-
-MESOS_HOME=/home/mosharaf/Work/mesos
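The spark-env.sh file is sourced by the launch scripts to set per-site variables, and removing it from version control keeps machine-specific paths like the MESOS_HOME above out of the repository. A hedged sketch of how such environment variables could be picked up from the JVM side; the helper and defaults are assumptions, not the actual run-script logic:

    // Hedged sketch: reading the kind of variables spark-env.sh exports.
    object SiteEnv {
      private def envOr(name: String, default: String): String = {
        val v = System.getenv(name)
        if (v == null) default else v
      }

      val mesosHome = envOr("MESOS_HOME", "/usr/local/mesos")   // default path is an assumption
      val sparkMem  = envOr("SPARK_MEM", "512m")                // same format as -Xmx, e.g. 300m or 1g
    }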


@@ -359,7 +359,7 @@ extends RDD[Pair[T, U]](sc) {
   : RDD[(K, C)] =
   {
     val shufClass = Class.forName(System.getProperty(
-      "spark.shuffle.class", "spark.LocalFileShuffle"))
+      "spark.shuffle.class", "spark.DfsShuffle"))
     val shuf = shufClass.newInstance().asInstanceOf[Shuffle[K, V, C]]
     shuf.compute(self, numSplits, createCombiner, mergeValue, mergeCombiners)
   }
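The shuffle implementation is resolved by name through System.getProperty and reflection, so this change only moves the fallback: DfsShuffle is now the default, but the previous behaviour can still be selected per run without recompiling. A hedged sketch of that override; the property name and class names come from the hunk above:

    // Equivalent to launching the JVM with -Dspark.shuffle.class=spark.LocalFileShuffle.
    System.setProperty("spark.shuffle.class", "spark.LocalFileShuffle")

    // The call site in the diff then resolves the old implementation instead of
    // the new DfsShuffle default (Class.forName throws if the named class is not
    // on the classpath).
    val shufClassName = System.getProperty("spark.shuffle.class", "spark.DfsShuffle")
    val shufClass     = Class.forName(shufClassName)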