author     wangchengcheng <[email protected]>  2023-07-27 15:43:51 +0800
committer  wangchengcheng <[email protected]>  2023-07-27 15:43:51 +0800
commit     124f687daace8b85e5c74abac04bcd0a92744a8d (patch)
tree       4f563326b1be67cfb51bf6a04f1ca4d953536e76 /MPE/spark/sbin
parent     08686ae87f9efe7a590f48db74ed133b481c85b1 (diff)
P19 23.07 online-config (P19)
Diffstat (limited to 'MPE/spark/sbin')
-rw-r--r--  MPE/spark/sbin/dae-sparkall.sh                 54
-rw-r--r--  MPE/spark/sbin/set_spark_env.sh                39
-rw-r--r--  MPE/spark/sbin/slaves.sh                      103
-rw-r--r--  MPE/spark/sbin/spark-config.sh                 33
-rw-r--r--  MPE/spark/sbin/spark-daemon.sh                242
-rw-r--r--  MPE/spark/sbin/spark-daemons.sh                36
-rw-r--r--  MPE/spark/sbin/start-all.sh                    35
-rw-r--r--  MPE/spark/sbin/start-history-server.sh         34
-rw-r--r--  MPE/spark/sbin/start-master.sh                 67
-rw-r--r--  MPE/spark/sbin/start-mesos-dispatcher.sh       51
-rw-r--r--  MPE/spark/sbin/start-mesos-shuffle-service.sh  36
-rw-r--r--  MPE/spark/sbin/start-shuffle-service.sh        34
-rw-r--r--  MPE/spark/sbin/start-slave.sh                  91
-rw-r--r--  MPE/spark/sbin/start-slaves.sh                 46
-rw-r--r--  MPE/spark/sbin/start-thriftserver.sh           56
-rw-r--r--  MPE/spark/sbin/stop-all.sh                     49
-rw-r--r--  MPE/spark/sbin/stop-history-server.sh          26
-rw-r--r--  MPE/spark/sbin/stop-master.sh                  28
-rw-r--r--  MPE/spark/sbin/stop-mesos-dispatcher.sh        33
-rw-r--r--  MPE/spark/sbin/stop-mesos-shuffle-service.sh   26
-rw-r--r--  MPE/spark/sbin/stop-shuffle-service.sh         26
-rw-r--r--  MPE/spark/sbin/stop-slave.sh                   44
-rw-r--r--  MPE/spark/sbin/stop-slaves.sh                  28
-rw-r--r--  MPE/spark/sbin/stop-thriftserver.sh            26
24 files changed, 1243 insertions, 0 deletions
diff --git a/MPE/spark/sbin/dae-sparkall.sh b/MPE/spark/sbin/dae-sparkall.sh
new file mode 100644
index 0000000..178561c
--- /dev/null
+++ b/MPE/spark/sbin/dae-sparkall.sh
@@ -0,0 +1,54 @@
+#!/bin/bash
+source /etc/profile
+
+BASE_DIR=/data/tsg/olap
+
+VERSION=spark-2.2.3-bin-hadoop2.7
+
+function set_log(){
+RES_SUM_FILE=$BASE_DIR/$VERSION/logs
+
+if [ ! -f "$RES_SUM_FILE/" ]
+then
+ mkdir -p $RES_SUM_FILE
+fi
+
+if [ ! -d "$RES_SUM_FILE/$1" ];then
+ echo "0" > $RES_SUM_FILE/$1
+fi
+
+OLD_NUM=`cat $RES_SUM_FILE/$1`
+RESTART_NUM=`expr $OLD_NUM + 1`
+echo $RESTART_NUM > $RES_SUM_FILE/$1
+if [ $OLD_NUM -eq "0" ];then
+    echo "`date "+%Y-%m-%d %H:%M:%S"` - Spark $2 service started for the first time" >> $BASE_DIR/$VERSION/logs/restart.log
+else
+    echo "`date +%Y-%m-%d` `date +%H:%M:%S` - Spark $2 service down - restart count -> $RESTART_NUM." >> $BASE_DIR/$VERSION/logs/restart.log
+fi
+}
+
+
+while true ; do
+
+HAS_MA=`jps -l | grep -w "org.apache.spark.deploy.master.Master" | grep -v grep |wc -l`
+HAS_HI=`jps -l | grep -w "org.apache.spark.deploy.history.HistoryServer" | grep -v grep |wc -l`
+HAS_WO=`jps -l | grep -w "org.apache.spark.deploy.worker.Worker" | grep -v grep |wc -l`
+
+
+if [ $HAS_MA -eq "0" ];then
+ $BASE_DIR/$VERSION/sbin/start-master.sh
+ set_log maRes_sum Master
+fi
+
+if [ $HAS_HI -eq "0" ];then
+ $BASE_DIR/$VERSION/sbin/start-history-server.sh
+ set_log hiRes_sum HistoryServer
+fi
+
+if [ $HAS_WO -eq "0" ];then
+ $BASE_DIR/$VERSION/sbin/start-slave.sh spark://192.168.20.223:7077
+ set_log woRes_sum Worker
+fi
+
+sleep 60
+done
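For reference, a minimal sketch of how this watchdog loop is intended to be launched (paths taken from BASE_DIR/VERSION above; since the script loops every 60 seconds it is normally backgrounded, as set_spark_env.sh below also does):

    nohup /data/tsg/olap/spark-2.2.3-bin-hadoop2.7/sbin/dae-sparkall.sh > /dev/null 2>&1 &
    tail -f /data/tsg/olap/spark-2.2.3-bin-hadoop2.7/logs/restart.log    # watch restart events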
diff --git a/MPE/spark/sbin/set_spark_env.sh b/MPE/spark/sbin/set_spark_env.sh
new file mode 100644
index 0000000..3a351b2
--- /dev/null
+++ b/MPE/spark/sbin/set_spark_env.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+
+echo -e "\n#spark\nexport SPARK_HOME=/data/tsg/olap/spark-2.2.3-bin-hadoop2.7\nexport PATH=\$SPARK_HOME/sbin:\$PATH\nexport PATH=\$SPARK_HOME/bin:\$PATH" >> /etc/profile.d/spark.sh
+chmod +x /etc/profile.d/spark.sh
+source /etc/profile
+
+keeppath='/etc/init.d/keepsparkall'
+if [ -x $keeppath ];then
+ chkconfig --add keepsparkall
+ chkconfig keepsparkall on
+ service keepsparkall start && sleep 5
+ all_dae=`ps -ef | grep dae-sparkall.sh | grep -v grep | wc -l`
+ if [ $all_dae -eq "0" ];then
+ nohup /data/tsg/olap/spark-2.2.3-bin-hadoop2.7/sbin/dae-sparkall.sh > /dev/null 2>&1 &
+ fi
+fi
+
+keeppath='/etc/init.d/keepsparkmaster'
+if [ -x $keeppath ];then
+ chkconfig --add keepsparkmaster
+ chkconfig keepsparkmaster on
+ service keepsparkmaster start && sleep 5
+ master_dae=`ps -ef | grep dae-sparkmaster.sh | grep -v grep | wc -l`
+ if [ $master_dae -eq "0" ];then
+ nohup /data/tsg/olap/spark-2.2.3-bin-hadoop2.7/sbin/dae-sparkmaster.sh > /dev/null 2>&1 &
+ fi
+fi
+
+keeppath='/etc/init.d/keepsparkworker'
+if [ -x $keeppath ];then
+ chkconfig --add keepsparkworker
+ chkconfig keepsparkworker on
+ service keepsparkworker start && sleep 5
+ worker_dae=`ps -ef | grep dae-sparkworker.sh | grep -v grep | wc -l`
+ if [ $worker_dae -eq "0" ];then
+ nohup /data/tsg/olap/spark-2.2.3-bin-hadoop2.7/sbin/dae-sparkworker.sh > /dev/null 2>&1 &
+ fi
+fi
+
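A quick, non-authoritative check of what this script sets up (paths come from the script itself; the keepspark* service names assume the referenced init scripts are installed):

    cat /etc/profile.d/spark.sh                # SPARK_HOME and PATH exports written above
    chkconfig --list | grep keepspark          # keepsparkall/keepsparkmaster/keepsparkworker enabled
    ps -ef | grep dae-spark | grep -v grep     # watchdog daemons running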
diff --git a/MPE/spark/sbin/slaves.sh b/MPE/spark/sbin/slaves.sh
new file mode 100644
index 0000000..c971aa3
--- /dev/null
+++ b/MPE/spark/sbin/slaves.sh
@@ -0,0 +1,103 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Run a shell command on all slave hosts.
+#
+# Environment Variables
+#
+# SPARK_SLAVES File naming remote hosts.
+# Default is ${SPARK_CONF_DIR}/slaves.
+# SPARK_CONF_DIR Alternate conf dir. Default is ${SPARK_HOME}/conf.
+# SPARK_SLAVE_SLEEP Seconds to sleep between spawning remote commands.
+# SPARK_SSH_OPTS Options passed to ssh when running remote commands.
+##
+
+usage="Usage: slaves.sh [--config <conf-dir>] command..."
+
+# if no args specified, show usage
+if [ $# -le 0 ]; then
+ echo $usage
+ exit 1
+fi
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+# If the slaves file is specified in the command line,
+# then it takes precedence over the definition in
+# spark-env.sh. Save it here.
+if [ -f "$SPARK_SLAVES" ]; then
+ HOSTLIST=`cat "$SPARK_SLAVES"`
+fi
+
+# Check if --config is passed as an argument. It is an optional parameter.
+# Exit if the argument is not a directory.
+if [ "$1" == "--config" ]
+then
+ shift
+ conf_dir="$1"
+ if [ ! -d "$conf_dir" ]
+ then
+ echo "ERROR : $conf_dir is not a directory"
+ echo $usage
+ exit 1
+ else
+ export SPARK_CONF_DIR="$conf_dir"
+ fi
+ shift
+fi
+
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+if [ "$HOSTLIST" = "" ]; then
+ if [ "$SPARK_SLAVES" = "" ]; then
+ if [ -f "${SPARK_CONF_DIR}/slaves" ]; then
+ HOSTLIST=`cat "${SPARK_CONF_DIR}/slaves"`
+ else
+ HOSTLIST=localhost
+ fi
+ else
+ HOSTLIST=`cat "${SPARK_SLAVES}"`
+ fi
+fi
+
+
+
+# By default disable strict host key checking
+if [ "$SPARK_SSH_OPTS" = "" ]; then
+ SPARK_SSH_OPTS="-o StrictHostKeyChecking=no"
+fi
+
+for slave in `echo "$HOSTLIST"|sed "s/#.*$//;/^$/d"`; do
+ if [ -n "${SPARK_SSH_FOREGROUND}" ]; then
+ ssh $SPARK_SSH_OPTS "$slave" $"${@// /\\ }" \
+ 2>&1 | sed "s/^/$slave: /"
+ else
+ ssh $SPARK_SSH_OPTS "$slave" $"${@// /\\ }" \
+ 2>&1 | sed "s/^/$slave: /" &
+ fi
+ if [ "$SPARK_SLAVE_SLEEP" != "" ]; then
+ sleep $SPARK_SLAVE_SLEEP
+ fi
+done
+
+wait
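A hedged usage sketch following the header comments above (the conf directory and ssh options are assumptions about the local setup, not values from this commit):

    ./sbin/slaves.sh uptime                                          # run on every host in ${SPARK_CONF_DIR}/slaves
    SPARK_SSH_OPTS="-p 2222 -o StrictHostKeyChecking=no" \
        ./sbin/slaves.sh --config /etc/spark/conf uptime             # alternate conf dir and ssh options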
diff --git a/MPE/spark/sbin/spark-config.sh b/MPE/spark/sbin/spark-config.sh
new file mode 100644
index 0000000..bf3da18
--- /dev/null
+++ b/MPE/spark/sbin/spark-config.sh
@@ -0,0 +1,33 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# included in all the spark scripts with source command
+# should not be executable directly
+# also should not be passed any arguments, since we need original $*
+
+# symlink and absolute path should rely on SPARK_HOME to resolve
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+export SPARK_CONF_DIR="${SPARK_CONF_DIR:-"${SPARK_HOME}/conf"}"
+# Add the PySpark classes to the PYTHONPATH:
+if [ -z "${PYSPARK_PYTHONPATH_SET}" ]; then
+ export PYTHONPATH="${SPARK_HOME}/python:${PYTHONPATH}"
+ export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.10.7-src.zip:${PYTHONPATH}"
+ export PYSPARK_PYTHONPATH_SET=1
+fi
diff --git a/MPE/spark/sbin/spark-daemon.sh b/MPE/spark/sbin/spark-daemon.sh
new file mode 100644
index 0000000..c227c98
--- /dev/null
+++ b/MPE/spark/sbin/spark-daemon.sh
@@ -0,0 +1,242 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Runs a Spark command as a daemon.
+#
+# Environment Variables
+#
+# SPARK_CONF_DIR Alternate conf dir. Default is ${SPARK_HOME}/conf.
+# SPARK_LOG_DIR Where log files are stored. ${SPARK_HOME}/logs by default.
+# SPARK_MASTER host:path where spark code should be rsync'd from
+# SPARK_PID_DIR The pid files are stored. /tmp by default.
+# SPARK_IDENT_STRING A string representing this instance of spark. $USER by default
+# SPARK_NICENESS The scheduling priority for daemons. Defaults to 0.
+# SPARK_NO_DAEMONIZE If set, will run the proposed command in the foreground. It will not output a PID file.
+##
+
+usage="Usage: spark-daemon.sh [--config <conf-dir>] (start|stop|submit|status) <spark-command> <spark-instance-number> <args...>"
+
+# if no args specified, show usage
+if [ $# -le 1 ]; then
+ echo $usage
+ exit 1
+fi
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+# get arguments
+
+# Check if --config is passed as an argument. It is an optional parameter.
+# Exit if the argument is not a directory.
+
+if [ "$1" == "--config" ]
+then
+ shift
+ conf_dir="$1"
+ if [ ! -d "$conf_dir" ]
+ then
+ echo "ERROR : $conf_dir is not a directory"
+ echo $usage
+ exit 1
+ else
+ export SPARK_CONF_DIR="$conf_dir"
+ fi
+ shift
+fi
+
+option=$1
+shift
+command=$1
+shift
+instance=$1
+shift
+
+spark_rotate_log ()
+{
+ log=$1;
+ num=5;
+ if [ -n "$2" ]; then
+ num=$2
+ fi
+ if [ -f "$log" ]; then # rotate logs
+ while [ $num -gt 1 ]; do
+ prev=`expr $num - 1`
+ [ -f "$log.$prev" ] && mv "$log.$prev" "$log.$num"
+ num=$prev
+ done
+ mv "$log" "$log.$num";
+ fi
+}
+
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+if [ "$SPARK_IDENT_STRING" = "" ]; then
+ export SPARK_IDENT_STRING="$USER"
+fi
+
+
+export SPARK_PRINT_LAUNCH_COMMAND="1"
+
+# get log directory
+if [ "$SPARK_LOG_DIR" = "" ]; then
+ export SPARK_LOG_DIR="${SPARK_HOME}/logs"
+fi
+mkdir -p "$SPARK_LOG_DIR"
+touch "$SPARK_LOG_DIR"/.spark_test > /dev/null 2>&1
+TEST_LOG_DIR=$?
+if [ "${TEST_LOG_DIR}" = "0" ]; then
+ rm -f "$SPARK_LOG_DIR"/.spark_test
+else
+ chown "$SPARK_IDENT_STRING" "$SPARK_LOG_DIR"
+fi
+
+if [ "$SPARK_PID_DIR" = "" ]; then
+ SPARK_PID_DIR=/tmp
+fi
+
+# some variables
+log="$SPARK_LOG_DIR/spark-$SPARK_IDENT_STRING-$command-$instance-$HOSTNAME.out"
+pid="$SPARK_PID_DIR/spark-$SPARK_IDENT_STRING-$command-$instance.pid"
+
+# Set default scheduling priority
+if [ "$SPARK_NICENESS" = "" ]; then
+ export SPARK_NICENESS=0
+fi
+
+execute_command() {
+ if [ -z ${SPARK_NO_DAEMONIZE+set} ]; then
+ nohup -- "$@" >> $log 2>&1 < /dev/null &
+ newpid="$!"
+
+ echo "$newpid" > "$pid"
+
+ # Poll for up to 5 seconds for the java process to start
+ for i in {1..10}
+ do
+ if [[ $(ps -p "$newpid" -o comm=) =~ "java" ]]; then
+ break
+ fi
+ sleep 0.5
+ done
+
+ sleep 2
+ # Check if the process has died; in that case we'll tail the log so the user can see
+ if [[ ! $(ps -p "$newpid" -o comm=) =~ "java" ]]; then
+ echo "failed to launch: $@"
+ tail -2 "$log" | sed 's/^/ /'
+ echo "full log in $log"
+ fi
+ else
+ "$@"
+ fi
+}
+
+run_command() {
+ mode="$1"
+ shift
+
+ mkdir -p "$SPARK_PID_DIR"
+
+ if [ -f "$pid" ]; then
+ TARGET_ID="$(cat "$pid")"
+ if [[ $(ps -p "$TARGET_ID" -o comm=) =~ "java" ]]; then
+ echo "$command running as process $TARGET_ID. Stop it first."
+ exit 1
+ fi
+ fi
+
+ if [ "$SPARK_MASTER" != "" ]; then
+ echo rsync from "$SPARK_MASTER"
+ rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' "$SPARK_MASTER/" "${SPARK_HOME}"
+ fi
+
+ spark_rotate_log "$log"
+ echo "starting $command, logging to $log"
+
+ case "$mode" in
+ (class)
+ execute_command nice -n "$SPARK_NICENESS" "${SPARK_HOME}"/bin/spark-class "$command" "$@"
+ ;;
+
+ (submit)
+ execute_command nice -n "$SPARK_NICENESS" bash "${SPARK_HOME}"/bin/spark-submit --class "$command" "$@"
+ ;;
+
+ (*)
+ echo "unknown mode: $mode"
+ exit 1
+ ;;
+ esac
+
+}
+
+case $option in
+
+ (submit)
+ run_command submit "$@"
+ ;;
+
+ (start)
+ run_command class "$@"
+ ;;
+
+ (stop)
+
+ if [ -f $pid ]; then
+ TARGET_ID="$(cat "$pid")"
+ if [[ $(ps -p "$TARGET_ID" -o comm=) =~ "java" ]]; then
+ echo "stopping $command"
+ kill "$TARGET_ID" && rm -f "$pid"
+ else
+ echo "no $command to stop"
+ fi
+ else
+ echo "no $command to stop"
+ fi
+ ;;
+
+ (status)
+
+ if [ -f $pid ]; then
+ TARGET_ID="$(cat "$pid")"
+ if [[ $(ps -p "$TARGET_ID" -o comm=) =~ "java" ]]; then
+ echo $command is running.
+ exit 0
+ else
+ echo $pid file is present but $command not running
+ exit 1
+ fi
+ else
+ echo $command not running.
+ exit 2
+ fi
+ ;;
+
+ (*)
+ echo $usage
+ exit 1
+ ;;
+
+esac
+
+
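Illustrative invocations matching the usage string above; the class, instance number, and master URL are examples drawn from elsewhere in this commit, not prescriptions:

    ./sbin/spark-daemon.sh start  org.apache.spark.deploy.worker.Worker 1 spark://master:7077
    ./sbin/spark-daemon.sh status org.apache.spark.deploy.worker.Worker 1    # exits 0/1/2 as handled above
    ./sbin/spark-daemon.sh stop   org.apache.spark.deploy.worker.Worker 1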
diff --git a/MPE/spark/sbin/spark-daemons.sh b/MPE/spark/sbin/spark-daemons.sh
new file mode 100644
index 0000000..dec2f44
--- /dev/null
+++ b/MPE/spark/sbin/spark-daemons.sh
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Run a Spark command on all slave hosts.
+
+usage="Usage: spark-daemons.sh [--config <conf-dir>] [start|stop] command instance-number args..."
+
+# if no args specified, show usage
+if [ $# -le 1 ]; then
+ echo $usage
+ exit 1
+fi
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+exec "${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/sbin/spark-daemon.sh" "$@"
diff --git a/MPE/spark/sbin/start-all.sh b/MPE/spark/sbin/start-all.sh
new file mode 100644
index 0000000..a5d30d2
--- /dev/null
+++ b/MPE/spark/sbin/start-all.sh
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Start all spark daemons.
+# Starts the master on this node.
+# Starts a worker on each node specified in conf/slaves
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+# Load the Spark configuration
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+# Start Master
+"${SPARK_HOME}/sbin"/start-master.sh
+
+# Start Workers
+"${SPARK_HOME}/sbin"/start-slaves.sh
diff --git a/MPE/spark/sbin/start-history-server.sh b/MPE/spark/sbin/start-history-server.sh
new file mode 100644
index 0000000..38a43b9
--- /dev/null
+++ b/MPE/spark/sbin/start-history-server.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Starts the history server on the machine this script is executed on.
+#
+# Usage: start-history-server.sh
+#
+# Use the SPARK_HISTORY_OPTS environment variable to set history server configuration.
+#
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+exec "${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.history.HistoryServer 1 "$@"
diff --git a/MPE/spark/sbin/start-master.sh b/MPE/spark/sbin/start-master.sh
new file mode 100644
index 0000000..97ee321
--- /dev/null
+++ b/MPE/spark/sbin/start-master.sh
@@ -0,0 +1,67 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Starts the master on the machine this script is executed on.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+# NOTE: This exact class name is matched downstream by SparkSubmit.
+# Any changes need to be reflected there.
+CLASS="org.apache.spark.deploy.master.Master"
+
+if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
+ echo "Usage: ./sbin/start-master.sh [options]"
+ pattern="Usage:"
+ pattern+="\|Using Spark's default log4j profile:"
+ pattern+="\|Registered signal handlers for"
+
+ "${SPARK_HOME}"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
+ exit 1
+fi
+
+ORIGINAL_ARGS="$@"
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+if [ "$SPARK_MASTER_PORT" = "" ]; then
+ SPARK_MASTER_PORT=7077
+fi
+
+if [ "$SPARK_MASTER_HOST" = "" ]; then
+ case `uname` in
+ (SunOS)
+ SPARK_MASTER_HOST="`/usr/sbin/check-hostname | awk '{print $NF}'`"
+ ;;
+ (*)
+ SPARK_MASTER_HOST="`hostname -f`"
+ ;;
+ esac
+fi
+
+if [ "$SPARK_MASTER_WEBUI_PORT" = "" ]; then
+ SPARK_MASTER_WEBUI_PORT=8080
+fi
+
+"${SPARK_HOME}/sbin"/spark-daemon.sh start $CLASS 1 \
+ --host $SPARK_MASTER_HOST --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT \
+ $ORIGINAL_ARGS
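A minimal sketch of overriding the defaults resolved above (the host value is the one used by dae-sparkall.sh in this commit; ports are the script defaults):

    SPARK_MASTER_HOST=192.168.20.223 SPARK_MASTER_PORT=7077 SPARK_MASTER_WEBUI_PORT=8080 ./sbin/start-master.sh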
diff --git a/MPE/spark/sbin/start-mesos-dispatcher.sh b/MPE/spark/sbin/start-mesos-dispatcher.sh
new file mode 100644
index 0000000..ecaad7a
--- /dev/null
+++ b/MPE/spark/sbin/start-mesos-dispatcher.sh
@@ -0,0 +1,51 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Starts the Mesos Cluster Dispatcher on the machine this script is executed on.
+# The Mesos Cluster Dispatcher is responsible for launching the Mesos framework and
+# Rest server to handle driver requests for Mesos cluster mode.
+# Only one cluster dispatcher is needed per Mesos cluster.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+if [ "$SPARK_MESOS_DISPATCHER_PORT" = "" ]; then
+ SPARK_MESOS_DISPATCHER_PORT=7077
+fi
+
+if [ "$SPARK_MESOS_DISPATCHER_HOST" = "" ]; then
+ case `uname` in
+ (SunOS)
+ SPARK_MESOS_DISPATCHER_HOST="`/usr/sbin/check-hostname | awk '{print $NF}'`"
+ ;;
+ (*)
+ SPARK_MESOS_DISPATCHER_HOST="`hostname -f`"
+ ;;
+ esac
+fi
+
+if [ "$SPARK_MESOS_DISPATCHER_NUM" = "" ]; then
+ SPARK_MESOS_DISPATCHER_NUM=1
+fi
+
+"${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.mesos.MesosClusterDispatcher $SPARK_MESOS_DISPATCHER_NUM --host $SPARK_MESOS_DISPATCHER_HOST --port $SPARK_MESOS_DISPATCHER_PORT "$@"
diff --git a/MPE/spark/sbin/start-mesos-shuffle-service.sh b/MPE/spark/sbin/start-mesos-shuffle-service.sh
new file mode 100644
index 0000000..1845845
--- /dev/null
+++ b/MPE/spark/sbin/start-mesos-shuffle-service.sh
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Starts the Mesos external shuffle server on the machine this script is executed on.
+# The Mesos external shuffle service detects when an application exits and automatically
+# cleans up its shuffle files.
+#
+# Usage: start-mesos-shuffle-server.sh
+#
+# Use the SPARK_SHUFFLE_OPTS environment variable to set shuffle service configuration.
+#
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+exec "${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.mesos.MesosExternalShuffleService 1
diff --git a/MPE/spark/sbin/start-shuffle-service.sh b/MPE/spark/sbin/start-shuffle-service.sh
new file mode 100644
index 0000000..793e165
--- /dev/null
+++ b/MPE/spark/sbin/start-shuffle-service.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Starts the external shuffle server on the machine this script is executed on.
+#
+# Usage: start-shuffle-server.sh
+#
+# Use the SPARK_SHUFFLE_OPTS environment variable to set shuffle server configuration.
+#
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+exec "${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.ExternalShuffleService 1
diff --git a/MPE/spark/sbin/start-slave.sh b/MPE/spark/sbin/start-slave.sh
new file mode 100644
index 0000000..a22c00d
--- /dev/null
+++ b/MPE/spark/sbin/start-slave.sh
@@ -0,0 +1,91 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Starts a slave on the machine this script is executed on.
+#
+# Environment Variables
+#
+# SPARK_WORKER_INSTANCES The number of worker instances to run on this
+# slave. Default is 1.
+# SPARK_WORKER_PORT The base port number for the first worker. If set,
+# subsequent workers will increment this number. If
+# unset, Spark will find a valid port number, but
+# with no guarantee of a predictable pattern.
+# SPARK_WORKER_WEBUI_PORT The base port for the web interface of the first
+# worker. Subsequent workers will increment this
+#                          number. Default in this build is 7081 (upstream
+#                          default is 8081); see below.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+# NOTE: This exact class name is matched downstream by SparkSubmit.
+# Any changes need to be reflected there.
+CLASS="org.apache.spark.deploy.worker.Worker"
+
+if [[ $# -lt 1 ]] || [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
+ echo "Usage: ./sbin/start-slave.sh [options] <master>"
+ pattern="Usage:"
+ pattern+="\|Using Spark's default log4j profile:"
+ pattern+="\|Registered signal handlers for"
+
+ "${SPARK_HOME}"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
+ exit 1
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+# First argument should be the master; we need to store it aside because we may
+# need to insert arguments between it and the other arguments
+MASTER=$1
+shift
+
+# Determine desired worker port
+if [ "$SPARK_WORKER_WEBUI_PORT" = "" ]; then
+ SPARK_WORKER_WEBUI_PORT=7081
+fi
+
+# Start up the appropriate number of workers on this machine.
+# quick local function to start a worker
+function start_instance {
+ WORKER_NUM=$1
+ shift
+
+ if [ "$SPARK_WORKER_PORT" = "" ]; then
+ PORT_FLAG=
+ PORT_NUM=
+ else
+ PORT_FLAG="--port"
+ PORT_NUM=$(( $SPARK_WORKER_PORT + $WORKER_NUM - 1 ))
+ fi
+ WEBUI_PORT=$(( $SPARK_WORKER_WEBUI_PORT + $WORKER_NUM - 1 ))
+
+ "${SPARK_HOME}/sbin"/spark-daemon.sh start $CLASS $WORKER_NUM \
+ --webui-port "$WEBUI_PORT" $PORT_FLAG $PORT_NUM $MASTER "$@"
+}
+
+if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
+ start_instance 1 "$@"
+else
+ for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
+ start_instance $(( 1 + $i )) "$@"
+ done
+fi
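A hedged example of starting two worker instances against the master URL used elsewhere in this commit; web UI ports follow the 7081 base set above:

    SPARK_WORKER_INSTANCES=2 ./sbin/start-slave.sh spark://192.168.20.223:7077
    # workers get web UIs on 7081 and 7082; --port is only passed if SPARK_WORKER_PORT is set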
diff --git a/MPE/spark/sbin/start-slaves.sh b/MPE/spark/sbin/start-slaves.sh
new file mode 100644
index 0000000..f5269df
--- /dev/null
+++ b/MPE/spark/sbin/start-slaves.sh
@@ -0,0 +1,46 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Starts a slave instance on each machine specified in the conf/slaves file.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+# Find the port number for the master
+if [ "$SPARK_MASTER_PORT" = "" ]; then
+ SPARK_MASTER_PORT=7077
+fi
+
+if [ "$SPARK_MASTER_HOST" = "" ]; then
+ case `uname` in
+ (SunOS)
+ SPARK_MASTER_HOST="`/usr/sbin/check-hostname | awk '{print $NF}'`"
+ ;;
+ (*)
+ SPARK_MASTER_HOST="`hostname -f`"
+ ;;
+ esac
+fi
+
+# Launch the slaves
+"${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/sbin/start-slave.sh" "spark://$SPARK_MASTER_HOST:$SPARK_MASTER_PORT"
diff --git a/MPE/spark/sbin/start-thriftserver.sh b/MPE/spark/sbin/start-thriftserver.sh
new file mode 100644
index 0000000..f02f317
--- /dev/null
+++ b/MPE/spark/sbin/start-thriftserver.sh
@@ -0,0 +1,56 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Shell script for starting the Spark SQL Thrift server
+
+# Enter posix mode for bash
+set -o posix
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+# NOTE: This exact class name is matched downstream by SparkSubmit.
+# Any changes need to be reflected there.
+CLASS="org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
+
+function usage {
+ echo "Usage: ./sbin/start-thriftserver [options] [thrift server options]"
+ pattern="usage"
+ pattern+="\|Spark assembly has been built with Hive"
+ pattern+="\|NOTE: SPARK_PREPEND_CLASSES is set"
+ pattern+="\|Spark Command: "
+ pattern+="\|======="
+ pattern+="\|--help"
+
+ "${SPARK_HOME}"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
+ echo
+ echo "Thrift server options:"
+ "${SPARK_HOME}"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
+}
+
+if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
+ usage
+ exit 0
+fi
+
+export SUBMIT_USAGE_FUNCTION=usage
+
+exec "${SPARK_HOME}"/sbin/spark-daemon.sh submit $CLASS 1 --name "Thrift JDBC/ODBC Server" "$@"
diff --git a/MPE/spark/sbin/stop-all.sh b/MPE/spark/sbin/stop-all.sh
new file mode 100644
index 0000000..4e476ca
--- /dev/null
+++ b/MPE/spark/sbin/stop-all.sh
@@ -0,0 +1,49 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Stop all spark daemons.
+# Run this on the master node.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+# Load the Spark configuration
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+# Stop the slaves, then the master
+"${SPARK_HOME}/sbin"/stop-slaves.sh
+"${SPARK_HOME}/sbin"/stop-master.sh
+
+if [ "$1" == "--wait" ]
+then
+ printf "Waiting for workers to shut down..."
+ while true
+ do
+ running=`${SPARK_HOME}/sbin/slaves.sh ps -ef | grep -v grep | grep deploy.worker.Worker`
+ if [ -z "$running" ]
+ then
+ printf "\nAll workers successfully shut down.\n"
+ break
+ else
+ printf "."
+ sleep 10
+ fi
+ done
+fi
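Example invocation exercising the optional flag handled above:

    ./sbin/stop-all.sh --wait    # blocks until no deploy.worker.Worker processes remain on the slave hosts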
diff --git a/MPE/spark/sbin/stop-history-server.sh b/MPE/spark/sbin/stop-history-server.sh
new file mode 100644
index 0000000..14e3af4
--- /dev/null
+++ b/MPE/spark/sbin/stop-history-server.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Stops the history server on the machine this script is executed on.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+"${SPARK_HOME}/sbin/spark-daemon.sh" stop org.apache.spark.deploy.history.HistoryServer 1
diff --git a/MPE/spark/sbin/stop-master.sh b/MPE/spark/sbin/stop-master.sh
new file mode 100644
index 0000000..14644ea
--- /dev/null
+++ b/MPE/spark/sbin/stop-master.sh
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Stops the master on the machine this script is executed on.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1
diff --git a/MPE/spark/sbin/stop-mesos-dispatcher.sh b/MPE/spark/sbin/stop-mesos-dispatcher.sh
new file mode 100644
index 0000000..b13e018
--- /dev/null
+++ b/MPE/spark/sbin/stop-mesos-dispatcher.sh
@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Stop the Mesos Cluster dispatcher on the machine this script is executed on.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+if [ "$SPARK_MESOS_DISPATCHER_NUM" = "" ]; then
+ SPARK_MESOS_DISPATCHER_NUM=1
+fi
+
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.mesos.MesosClusterDispatcher \
+ $SPARK_MESOS_DISPATCHER_NUM
+
diff --git a/MPE/spark/sbin/stop-mesos-shuffle-service.sh b/MPE/spark/sbin/stop-mesos-shuffle-service.sh
new file mode 100644
index 0000000..d23cad3
--- /dev/null
+++ b/MPE/spark/sbin/stop-mesos-shuffle-service.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Stops the Mesos external shuffle service on the machine this script is executed on.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.mesos.MesosExternalShuffleService 1
diff --git a/MPE/spark/sbin/stop-shuffle-service.sh b/MPE/spark/sbin/stop-shuffle-service.sh
new file mode 100644
index 0000000..50d69cf
--- /dev/null
+++ b/MPE/spark/sbin/stop-shuffle-service.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Stops the external shuffle service on the machine this script is executed on.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.ExternalShuffleService 1
diff --git a/MPE/spark/sbin/stop-slave.sh b/MPE/spark/sbin/stop-slave.sh
new file mode 100644
index 0000000..685bcf5
--- /dev/null
+++ b/MPE/spark/sbin/stop-slave.sh
@@ -0,0 +1,44 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# A shell script to stop all workers on a single slave
+#
+# Environment variables
+#
+# SPARK_WORKER_INSTANCES The number of worker instances that should be
+# running on this slave. Default is 1.
+
+# Usage: stop-slave.sh
+# Stops all slaves on this worker machine
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
+ "${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker 1
+else
+ for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
+ "${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
+ done
+fi
diff --git a/MPE/spark/sbin/stop-slaves.sh b/MPE/spark/sbin/stop-slaves.sh
new file mode 100644
index 0000000..a57441b
--- /dev/null
+++ b/MPE/spark/sbin/stop-slaves.sh
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+"${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/sbin"/stop-slave.sh
diff --git a/MPE/spark/sbin/stop-thriftserver.sh b/MPE/spark/sbin/stop-thriftserver.sh
new file mode 100644
index 0000000..cf45058
--- /dev/null
+++ b/MPE/spark/sbin/stop-thriftserver.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Stops the thrift server on the machine this script is executed on.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.sql.hive.thriftserver.HiveThriftServer2 1