summaryrefslogtreecommitdiff
path: root/MPE/spark/bin/spark-class
diff options
context:
space:
mode:
Diffstat (limited to 'MPE/spark/bin/spark-class')
-rw-r--r--  MPE/spark/bin/spark-class  99
1 file changed, 99 insertions, 0 deletions
diff --git a/MPE/spark/bin/spark-class b/MPE/spark/bin/spark-class
new file mode 100644
index 0000000..65d3b96
--- /dev/null
+++ b/MPE/spark/bin/spark-class
@@ -0,0 +1,99 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
# Resolve SPARK_HOME when the caller has not exported it; the helper script
# next to this one sets it in the current shell.
if [ -z "${SPARK_HOME}" ]; then
  . "$(dirname "$0")/find-spark-home"
fi

# Pull in the shared Spark environment setup.
. "${SPARK_HOME}/bin/load-spark-env.sh"
+
# Pick the JVM to run: prefer $JAVA_HOME, then fall back to whatever `java`
# is on the PATH; bail out if neither is available.
if [ -n "${JAVA_HOME}" ]; then
  RUNNER="${JAVA_HOME}/bin/java"
elif command -v java > /dev/null; then
  RUNNER="java"
else
  echo "JAVA_HOME is not set" >&2
  exit 1
fi
+
# Locate the Spark jars: a binary distribution keeps them in ${SPARK_HOME}/jars,
# a source checkout under assembly/target for the active Scala version.
if [ -d "${SPARK_HOME}/jars" ]; then
  SPARK_JARS_DIR="${SPARK_HOME}/jars"
else
  SPARK_JARS_DIR="${SPARK_HOME}/assembly/target/scala-$SPARK_SCALA_VERSION/jars"
fi

# The whole directory (a literal '*' wildcard classpath entry) feeds the
# launcher JVM. A missing directory is fatal unless a test harness is active.
if [ -d "$SPARK_JARS_DIR" ] || [ -n "$SPARK_TESTING$SPARK_SQL_TESTING" ]; then
  LAUNCH_CLASSPATH="$SPARK_JARS_DIR/*"
else
  echo "Failed to find Spark jars directory ($SPARK_JARS_DIR)." 1>&2
  echo "You need to build Spark with the target \"package\" before running this program." 1>&2
  exit 1
fi
+
# Developers can set SPARK_PREPEND_CLASSES to run against freshly compiled
# launcher classes ahead of the packaged jars.
if [[ -n "$SPARK_PREPEND_CLASSES" ]]; then
  LAUNCH_CLASSPATH="${SPARK_HOME}/launcher/target/scala-$SPARK_SCALA_VERSION/classes:$LAUNCH_CLASSPATH"
fi

# The test suites must not pick up a real Hadoop/YARN configuration.
if [ -n "$SPARK_TESTING" ]; then
  unset YARN_CONF_DIR HADOOP_CONF_DIR
fi
+
# Ask the launcher library to compute the final command line. It emits the
# arguments NUL-separated, so values containing shell metacharacters survive
# intact; the launcher's own exit status is appended as one last NUL-terminated
# decimal field, letting the caller detect launcher failure.
build_command() {
  "$RUNNER" -Xmx128m -cp "$LAUNCH_CLASSPATH" org.apache.spark.launcher.Main "$@"
  local rc=$?
  printf '%d\0' "$rc"
}
+
# Turn off posix mode since it does not allow process substitution.
set +o posix

# Read the launcher's NUL-separated output into an array; the final element is
# the launcher's exit status appended by build_command.
CMD=()
while IFS= read -d '' -r ARG; do
  CMD+=("$ARG")
done < <(build_command "$@")

COUNT=${#CMD[@]}

# Guard the empty case: with no output at all, ${CMD[$LAST]} below would hit a
# "bad array subscript" (LAST would be -1) instead of a clear diagnostic.
if [ "$COUNT" -eq 0 ]; then
  echo "Failed to run the Spark launcher; no output was produced." 1>&2
  exit 1
fi

LAST=$((COUNT - 1))
LAUNCHER_EXIT_CODE=${CMD[$LAST]}

# Certain JVM failures result in errors being printed to stdout (instead of stderr), which causes
# the code that parses the output of the launcher to get confused. In those cases, check if the
# exit code is an integer, and if it's not, handle it as a special error case.
if ! [[ $LAUNCHER_EXIT_CODE =~ ^[0-9]+$ ]]; then
  # Print everything but the last line (the bogus status field) to stderr.
  # printf avoids echo's option parsing on arbitrary data, and POSIX
  # `sed '$d'` replaces GNU-only `head -n-1`, which fails on BSD/macOS.
  printf '%s\n' "${CMD[*]}" | sed '$d' 1>&2
  exit 1
fi

# Quoted test: LAUNCHER_EXIT_CODE is user-environment-derived data.
if [ "$LAUNCHER_EXIT_CODE" != 0 ]; then
  exit "$LAUNCHER_EXIT_CODE"
fi

# Drop the status field and replace this shell with the real command.
CMD=("${CMD[@]:0:$LAST}")
exec "${CMD[@]}"