[SPARK-1150] fix repo location in create script
https://spark-project.atlassian.net/browse/SPARK-1150 fix the repo location in create_release script Author: Mark Grover <mark@apache.org> Closes #48 from CodingCat/script_fixes and squashes the following commits: 01f4bf7 [Mark Grover] Fixing some nitpicks d2244d4 [Mark Grover] SPARK-676: Abbreviation in SPARK_MEM but not in SPARK_WORKER_MEMORY
This commit is contained in:
parent
556c56689b
commit
9aa0957118
|
@ -15,7 +15,7 @@
|
||||||
# - SPARK_MASTER_IP, to bind the master to a different IP address or hostname
|
# - SPARK_MASTER_IP, to bind the master to a different IP address or hostname
|
||||||
# - SPARK_MASTER_PORT / SPARK_MASTER_WEBUI_PORT, to use non-default ports
|
# - SPARK_MASTER_PORT / SPARK_MASTER_WEBUI_PORT, to use non-default ports
|
||||||
# - SPARK_WORKER_CORES, to set the number of cores to use on this machine
|
# - SPARK_WORKER_CORES, to set the number of cores to use on this machine
|
||||||
# - SPARK_WORKER_MEMORY, to set how much memory to use (e.g. 1000m, 2g)
|
# - SPARK_WORKER_MEM, to set how much memory to use (e.g. 1000m, 2g)
|
||||||
# - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT
|
# - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT
|
||||||
# - SPARK_WORKER_INSTANCES, to set the number of worker processes per node
|
# - SPARK_WORKER_INSTANCES, to set the number of worker processes per node
|
||||||
# - SPARK_WORKER_DIR, to set the working directory of worker processes
|
# - SPARK_WORKER_DIR, to set the working directory of worker processes
|
||||||
|
|
|
@ -18,13 +18,15 @@
|
||||||
package org.apache.spark.deploy.worker
|
package org.apache.spark.deploy.worker
|
||||||
|
|
||||||
import java.lang.management.ManagementFactory
|
import java.lang.management.ManagementFactory
|
||||||
|
import org.apache.spark.Logging
|
||||||
|
|
||||||
import org.apache.spark.util.{IntParam, MemoryParam, Utils}
|
import org.apache.spark.util.{IntParam, MemoryParam, Utils}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Command-line parser for the master.
|
* Command-line parser for the master.
|
||||||
*/
|
*/
|
||||||
private[spark] class WorkerArguments(args: Array[String]) {
|
private[spark] class WorkerArguments(args: Array[String]) extends Logging {
|
||||||
|
initLogging()
|
||||||
var host = Utils.localHostName()
|
var host = Utils.localHostName()
|
||||||
var port = 0
|
var port = 0
|
||||||
var webUiPort = 8081
|
var webUiPort = 8081
|
||||||
|
@ -40,9 +42,13 @@ private[spark] class WorkerArguments(args: Array[String]) {
|
||||||
if (System.getenv("SPARK_WORKER_CORES") != null) {
|
if (System.getenv("SPARK_WORKER_CORES") != null) {
|
||||||
cores = System.getenv("SPARK_WORKER_CORES").toInt
|
cores = System.getenv("SPARK_WORKER_CORES").toInt
|
||||||
}
|
}
|
||||||
if (System.getenv("SPARK_WORKER_MEMORY") != null) {
|
if (System.getenv("SPARK_WORKER_MEM") != null) {
|
||||||
|
memory = Utils.memoryStringToMb(System.getenv("SPARK_WORKER_MEM"))
|
||||||
|
} else if (System.getenv("SPARK_WORKER_MEMORY") != null) {
|
||||||
|
logWarning("SPARK_WORKER_MEMORY is deprecated. Please use SPARK_WORKER_MEM instead")
|
||||||
memory = Utils.memoryStringToMb(System.getenv("SPARK_WORKER_MEMORY"))
|
memory = Utils.memoryStringToMb(System.getenv("SPARK_WORKER_MEMORY"))
|
||||||
}
|
}
|
||||||
|
|
||||||
if (System.getenv("SPARK_WORKER_WEBUI_PORT") != null) {
|
if (System.getenv("SPARK_WORKER_WEBUI_PORT") != null) {
|
||||||
webUiPort = System.getenv("SPARK_WORKER_WEBUI_PORT").toInt
|
webUiPort = System.getenv("SPARK_WORKER_WEBUI_PORT").toInt
|
||||||
}
|
}
|
||||||
|
|
|
@ -104,8 +104,8 @@ You can optionally configure the cluster further by setting environment variable
|
||||||
<td>Total number of cores to allow Spark applications to use on the machine (default: all available cores).</td>
|
<td>Total number of cores to allow Spark applications to use on the machine (default: all available cores).</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td><code>SPARK_WORKER_MEMORY</code></td>
|
<td><code>SPARK_WORKER_MEM</code></td>
|
||||||
<td>Total amount of memory to allow Spark applications to use on the machine, e.g. <code>1000m</code>, <code>2g</code> (default: total memory minus 1 GB); note that each application's <i>individual</i> memory is configured using its <code>spark.executor.memory</code> property.</td>
|
<td>Total amount of memory to allow Spark applications to use on the machine, e.g. <code>1000m</code>, <code>2g</code> (default: total memory minus 1 GB); note that each application's <i>individual</i> memory is configured using its <code>spark.executor.memory</code> property. The old variable <code>SPARK_WORKER_MEMORY</code> has been deprecated.</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td><code>SPARK_WORKER_WEBUI_PORT</code></td>
|
<td><code>SPARK_WORKER_WEBUI_PORT</code></td>
|
||||||
|
|
Loading…
Reference in a new issue