#!/bin/bash
# Launch a Spark worker container (which also runs an HDFS datanode)
# on the spark-net network.
#
# Positional arguments:
#   $1   Spark image name            $6   worker web UI port
#   $2   master container name       $7   container hostname
#   $3   start of exposed port range $8   datanode port
#   $4   end of exposed port range   $9   absolute path to conf dir
#   $5   worker port                 ${10} absolute path to data dir

SPARK_IMAGE=$1
MASTER_CONTAINER=$2
START_PORT=$3
END_PORT=$4
WORKER_PORT=$5
WORKER_WEBUI_PORT=$6
HOSTNAME=$7
DATANODE_PORT=$8
ABS_PATH_CONF=$9
ABS_PATH_DATA=${10}

# Note: --link is a legacy option; containers attached to the same
# user-defined network (spark-net) can already resolve each other by name.
docker run -d \
  -v "$ABS_PATH_CONF/worker":/conf \
  -v "$ABS_PATH_DATA":/tmp/data \
  -h "$HOSTNAME" \
  --name "$HOSTNAME" \
  --network spark-net \
  --link "$MASTER_CONTAINER" \
  -p "$WORKER_WEBUI_PORT:$WORKER_WEBUI_PORT" \
  -p "$DATANODE_PORT:$DATANODE_PORT" \
  --expose "$START_PORT-$END_PORT" \
  --expose "$WORKER_PORT" \
  --expose "$DATANODE_PORT" \
  -e "SPARK_CONF_DIR=/conf" \
  -e "SPARK_PUBLIC_DNS=127.0.0.1" \
  -e "SPARK_WORKER_CORES=4" \
  -e "SPARK_WORKER_PORT=$WORKER_PORT" \
  -e "SPARK_WORKER_WEBUI_PORT=$WORKER_WEBUI_PORT" \
  -e "LD_LIBRARY_PATH=/usr/local/hadoop/lib/native/" \
  -e "HDFS_DATA_HOST=$HOSTNAME" \
  -e "HDFS_HOST=namenode" \
  -e "HDFS_CONF_dfs_datanode_address=0.0.0.0:$DATANODE_PORT" \
  -e "SPARK_EXECUTOR_MEMORY=8g" \
  -e "SPARK_DAEMON_MEMORY=8g" \
  -e "SPARK_DRIVER_MEMORY=8g" \
  -e "SPARK_WORKER_MEMORY=8g" \
  -e "HDFS_CONF_dfs_client_use_datanode_hostname=true" \
  -e "AWS_ECS=false" \
  "$SPARK_IMAGE" \
  /usr/local/spark-2.2.0-bin-without-hadoop/worker.sh
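
# A minimal sketch of how this script might be invoked. The script name,
# image name, hostnames, ports, and paths below are assumptions for
# illustration, not values taken from this repo:
#
#   ./start-worker.sh spark-image spark-master 8881 8888 8882 8081 \
#       worker1 50010 /opt/cluster/conf /opt/cluster/data
#
# After it starts, the worker's registration with the master can be
# checked via the container logs or its web UI:
#
#   docker logs worker1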