Revert "[SPARK-13117][WEB UI] WebUI should use the local ip not 0.0.0.0"

This reverts commit 2e44031faf.
Shixiong Zhu 2016-02-25 11:39:26 -08:00
parent 5fcf4c2bfc
commit 46f6e79316
2 changed files with 2 additions and 3 deletions

core/src/main/scala/org/apache/spark/ui/WebUI.scala

@@ -134,7 +134,7 @@ private[spark] abstract class WebUI(
   def bind() {
     assert(!serverInfo.isDefined, "Attempted to bind %s more than once!".format(className))
     try {
-      serverInfo = Some(startJettyServer(publicHostName, port, sslOptions, handlers, conf, name))
+      serverInfo = Some(startJettyServer("0.0.0.0", port, sslOptions, handlers, conf, name))
       logInfo("Started %s at http://%s:%d".format(className, publicHostName, boundPort))
     } catch {
       case e: Exception =>
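
The only functional change in this file is the first argument to startJettyServer: after the revert the web UI binds to "0.0.0.0" (all interfaces) again instead of the resolved local hostname. Below is a minimal, Spark-independent sketch of that difference using plain java.net sockets; the object name and the ephemeral port 0 are illustrative only, not part of this commit.

// Minimal sketch (not Spark code): what switching the bind address changes.
import java.net.{InetAddress, InetSocketAddress, ServerSocket}

object BindAddressSketch {
  def main(args: Array[String]): Unit = {
    // After the revert: listen on every interface of the machine.
    val allInterfaces = new ServerSocket()
    allInterfaces.bind(new InetSocketAddress("0.0.0.0", 0))
    println(s"0.0.0.0        -> ${allInterfaces.getLocalSocketAddress}")

    // What SPARK-13117 had done: listen only on the resolved local hostname.
    val localHostOnly = new ServerSocket()
    localHostOnly.bind(new InetSocketAddress(InetAddress.getLocalHost.getHostName, 0))
    println(s"local hostname -> ${localHostOnly.getLocalSocketAddress}")

    allInterfaces.close()
    localHostOnly.close()
  }
}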

core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala

@@ -26,7 +26,6 @@ import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite}
 import org.apache.spark.scheduler.{SparkListener, SparkListenerExecutorAdded}
 import org.apache.spark.scheduler.cluster.ExecutorInfo
 import org.apache.spark.util.SparkConfWithEnv
-import org.apache.spark.util.Utils

 class LogUrlsStandaloneSuite extends SparkFunSuite with LocalSparkContext {
@@ -54,7 +53,7 @@ class LogUrlsStandaloneSuite extends SparkFunSuite with LocalSparkContext {
   }

   test("verify that log urls reflect SPARK_PUBLIC_DNS (SPARK-6175)") {
-    val SPARK_PUBLIC_DNS = Utils.localHostNameForURI()
+    val SPARK_PUBLIC_DNS = "public_dns"
     val conf = new SparkConfWithEnv(Map("SPARK_PUBLIC_DNS" -> SPARK_PUBLIC_DNS)).set(
       "spark.extraListeners", classOf[SaveExecutorInfo].getName)
     sc = new SparkContext("local-cluster[2,1,1024]", "test", conf)
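
For context on the test change: SparkConfWithEnv (imported in the hunk above) is a test helper that lets a SparkConf answer environment lookups such as SPARK_PUBLIC_DNS from an injected map, so the reverted test can assert against the fixed placeholder "public_dns" rather than the machine's real hostname. The sketch below is a hypothetical, simplified stand-in for that pattern, not Spark's actual helper.

// Hypothetical stand-in for the SparkConfWithEnv idea: environment lookups are
// answered from an injected map before falling back to the real environment.
class EnvOverrideConf(env: Map[String, String]) {
  def getenv(name: String): Option[String] =
    env.get(name).orElse(sys.env.get(name))
}

object EnvOverrideConfDemo extends App {
  // Mirrors the reverted test: the injected placeholder comes back verbatim,
  // so the assertion does not depend on the machine's real hostname.
  val conf = new EnvOverrideConf(Map("SPARK_PUBLIC_DNS" -> "public_dns"))
  assert(conf.getenv("SPARK_PUBLIC_DNS").contains("public_dns"))
}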