Diffstat (limited to 'MPE/spark')
-rw-r--r--  MPE/spark/bin/beeline | 32
-rw-r--r--  MPE/spark/bin/beeline.cmd | 20
-rw-r--r--  MPE/spark/bin/find-spark-home | 41
-rw-r--r--  MPE/spark/bin/find-spark-home.cmd | 60
-rw-r--r--  MPE/spark/bin/load-spark-env.cmd | 59
-rw-r--r--  MPE/spark/bin/load-spark-env.sh | 63
-rw-r--r--  MPE/spark/bin/pyspark | 77
-rw-r--r--  MPE/spark/bin/pyspark.cmd | 23
-rw-r--r--  MPE/spark/bin/pyspark2.cmd | 38
-rw-r--r--  MPE/spark/bin/run-example | 25
-rw-r--r--  MPE/spark/bin/run-example.cmd | 24
-rw-r--r--  MPE/spark/bin/spark-class | 99
-rw-r--r--  MPE/spark/bin/spark-class.cmd | 23
-rw-r--r--  MPE/spark/bin/spark-class2.cmd | 72
-rw-r--r--  MPE/spark/bin/spark-shell | 95
-rw-r--r--  MPE/spark/bin/spark-shell.cmd | 23
-rw-r--r--  MPE/spark/bin/spark-shell2.cmd | 37
-rw-r--r--  MPE/spark/bin/spark-sql | 25
-rw-r--r--  MPE/spark/bin/spark-submit | 27
-rw-r--r--  MPE/spark/bin/spark-submit.cmd | 23
-rw-r--r--  MPE/spark/bin/spark-submit2.cmd | 27
-rw-r--r--  MPE/spark/bin/sparkR | 26
-rw-r--r--  MPE/spark/bin/sparkR.cmd | 23
-rw-r--r--  MPE/spark/bin/sparkR2.cmd | 26
-rw-r--r--  MPE/spark/conf/computer.lnk | bin 0 -> 335 bytes
-rw-r--r--  MPE/spark/conf/docker.properties.template | 20
-rw-r--r--  MPE/spark/conf/fairscheduler.xml.template | 31
-rw-r--r--  MPE/spark/conf/log4j-task.properties | 22
-rw-r--r--  MPE/spark/conf/log4j.properties | 40
-rw-r--r--  MPE/spark/conf/log4j.properties.template | 40
-rw-r--r--  MPE/spark/conf/metrics.properties.template | 170
-rw-r--r--  MPE/spark/conf/slaves | 3
-rw-r--r--  MPE/spark/conf/slaves.template | 19
-rw-r--r--  MPE/spark/conf/spark-defaults.conf | 39
-rw-r--r--  MPE/spark/conf/spark-defaults.conf.template | 29
-rw-r--r--  MPE/spark/conf/spark-env.sh | 83
-rw-r--r--  MPE/spark/conf/spark-env.sh.template | 63
-rw-r--r--  MPE/spark/iplearning/application.properties | 44
-rw-r--r--  MPE/spark/iplearning/ip-learning-spark.jar | bin 0 -> 125276478 bytes
-rw-r--r--  MPE/spark/iplearning/iplearning.sh | 53
-rw-r--r--  MPE/spark/iplearning/iplearning_monitor.sh | 56
-rw-r--r--  MPE/spark/iplearning/logs/IpLearningApplication/running-2023-07.log | 9683
-rw-r--r--  MPE/spark/iplearning/logs/IpRecommendApplication/running-2023-07.log | 9640
-rw-r--r--  MPE/spark/iplearning/logs/SubscriberRecommendApplication/running-2023-07.log | 9673
-rw-r--r--  MPE/spark/iplearning/runnum | 1
-rw-r--r--  MPE/spark/iplearning/upconfig.sh | 7
-rw-r--r--  MPE/spark/sbin/dae-sparkall.sh | 54
-rw-r--r--  MPE/spark/sbin/set_spark_env.sh | 39
-rw-r--r--  MPE/spark/sbin/slaves.sh | 103
-rw-r--r--  MPE/spark/sbin/spark-config.sh | 33
-rw-r--r--  MPE/spark/sbin/spark-daemon.sh | 242
-rw-r--r--  MPE/spark/sbin/spark-daemons.sh | 36
-rw-r--r--  MPE/spark/sbin/start-all.sh | 35
-rw-r--r--  MPE/spark/sbin/start-history-server.sh | 34
-rw-r--r--  MPE/spark/sbin/start-master.sh | 67
-rw-r--r--  MPE/spark/sbin/start-mesos-dispatcher.sh | 51
-rw-r--r--  MPE/spark/sbin/start-mesos-shuffle-service.sh | 36
-rw-r--r--  MPE/spark/sbin/start-shuffle-service.sh | 34
-rw-r--r--  MPE/spark/sbin/start-slave.sh | 91
-rw-r--r--  MPE/spark/sbin/start-slaves.sh | 46
-rw-r--r--  MPE/spark/sbin/start-thriftserver.sh | 56
-rw-r--r--  MPE/spark/sbin/stop-all.sh | 49
-rw-r--r--  MPE/spark/sbin/stop-history-server.sh | 26
-rw-r--r--  MPE/spark/sbin/stop-master.sh | 28
-rw-r--r--  MPE/spark/sbin/stop-mesos-dispatcher.sh | 33
-rw-r--r--  MPE/spark/sbin/stop-mesos-shuffle-service.sh | 26
-rw-r--r--  MPE/spark/sbin/stop-shuffle-service.sh | 26
-rw-r--r--  MPE/spark/sbin/stop-slave.sh | 44
-rw-r--r--  MPE/spark/sbin/stop-slaves.sh | 28
-rw-r--r--  MPE/spark/sbin/stop-thriftserver.sh | 26
70 files changed, 31947 insertions, 0 deletions
diff --git a/MPE/spark/bin/beeline b/MPE/spark/bin/beeline
new file mode 100644
index 0000000..0585346
--- /dev/null
+++ b/MPE/spark/bin/beeline
@@ -0,0 +1,32 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Shell script for starting BeeLine
+
+# Enter posix mode for bash
+set -o posix
+
+# Figure out if SPARK_HOME is set
+if [ -z "${SPARK_HOME}" ]; then
+ source "$(dirname "$0")"/find-spark-home
+fi
+
+CLASS="org.apache.hive.beeline.BeeLine"
+exec "${SPARK_HOME}/bin/spark-class" $CLASS "$@"
diff --git a/MPE/spark/bin/beeline.cmd b/MPE/spark/bin/beeline.cmd
new file mode 100644
index 0000000..02464bd
--- /dev/null
+++ b/MPE/spark/bin/beeline.cmd
@@ -0,0 +1,20 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+cmd /V /E /C "%~dp0spark-class.cmd" org.apache.hive.beeline.BeeLine %*
diff --git a/MPE/spark/bin/find-spark-home b/MPE/spark/bin/find-spark-home
new file mode 100644
index 0000000..fa78407
--- /dev/null
+++ b/MPE/spark/bin/find-spark-home
@@ -0,0 +1,41 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Attempts to find a proper value for SPARK_HOME. Should be included using the "source" directive.
+
+FIND_SPARK_HOME_PYTHON_SCRIPT="$(cd "$(dirname "$0")"; pwd)/find_spark_home.py"
+
+# Short circuit if the user already has this set.
+if [ ! -z "${SPARK_HOME}" ]; then
+ exit 0
+elif [ ! -f "$FIND_SPARK_HOME_PYTHON_SCRIPT" ]; then
+ # If we are not in the same directory as find_spark_home.py we are not pip installed so we don't
+ # need to search the different Python directories for a Spark installation.
+# Note, however, that if the user has pip installed PySpark but is directly calling pyspark-shell or
+# spark-submit from another directory, we want to use that version of PySpark rather than the
+# pip installed version of PySpark.
+ export SPARK_HOME="$(cd "$(dirname "$0")"/..; pwd)"
+else
+ # We are pip installed, use the Python script to resolve a reasonable SPARK_HOME
+ # Default to standard python interpreter unless told otherwise
+ if [[ -z "$PYSPARK_DRIVER_PYTHON" ]]; then
+ PYSPARK_DRIVER_PYTHON="${PYSPARK_PYTHON:-"python"}"
+ fi
+ export SPARK_HOME=$($PYSPARK_DRIVER_PYTHON "$FIND_SPARK_HOME_PYTHON_SCRIPT")
+fi
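
Because it only exports SPARK_HOME, this script is meant to be sourced, not executed. A minimal sketch of the calling pattern used by the sibling launch scripts:

    # Resolve SPARK_HOME once, only if the caller has not already set it
    if [ -z "${SPARK_HOME}" ]; then
      source "$(dirname "$0")"/find-spark-home
    fi
    echo "Using SPARK_HOME=${SPARK_HOME}"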
diff --git a/MPE/spark/bin/find-spark-home.cmd b/MPE/spark/bin/find-spark-home.cmd
new file mode 100644
index 0000000..6025f67
--- /dev/null
+++ b/MPE/spark/bin/find-spark-home.cmd
@@ -0,0 +1,60 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem Path to Python script finding SPARK_HOME
+set FIND_SPARK_HOME_PYTHON_SCRIPT=%~dp0find_spark_home.py
+
+rem Default to standard python interpreter unless told otherwise
+set PYTHON_RUNNER=python
+rem If PYSPARK_DRIVER_PYTHON is set, it overrides the default Python
+if not "x%PYSPARK_DRIVER_PYTHON%"=="x" (
+ set PYTHON_RUNNER=%PYSPARK_DRIVER_PYTHON%
+)
+rem If PYSPARK_PYTHON is set, it takes precedence over PYSPARK_DRIVER_PYTHON
+if not "x%PYSPARK_PYTHON%"=="x" (
+ set PYTHON_RUNNER=%PYSPARK_PYTHON%
+)
+
+rem If no usable Python executable is found, fall back to this script's parent dir as SPARK_HOME
+where %PYTHON_RUNNER% > nul 2>&1
+if %ERRORLEVEL% neq 0 (
+ if not exist %PYTHON_RUNNER% (
+ if "x%SPARK_HOME%"=="x" (
+ echo Missing Python executable '%PYTHON_RUNNER%', defaulting to '%~dp0..' for SPARK_HOME ^
+environment variable. Please install Python or specify the correct Python executable in ^
+PYSPARK_DRIVER_PYTHON or PYSPARK_PYTHON environment variable to detect SPARK_HOME safely.
+ set SPARK_HOME=%~dp0..
+ )
+ )
+)
+
+rem Only attempt to find SPARK_HOME if it is not set.
+if "x%SPARK_HOME%"=="x" (
+ if not exist "%FIND_SPARK_HOME_PYTHON_SCRIPT%" (
+ rem If we are not in the same directory as find_spark_home.py we are not pip installed so we don't
+ rem need to search the different Python directories for a Spark installation.
+ rem Note only that, if the user has pip installed PySpark but is directly calling pyspark-shell or
+ rem spark-submit in another directory we want to use that version of PySpark rather than the
+ rem pip installed version of PySpark.
+ set SPARK_HOME=%~dp0..
+ ) else (
+ rem We are pip installed, use the Python script to resolve a reasonable SPARK_HOME
+ for /f "delims=" %%i in ('%PYTHON_RUNNER% %FIND_SPARK_HOME_PYTHON_SCRIPT%') do set SPARK_HOME=%%i
+ )
+)
diff --git a/MPE/spark/bin/load-spark-env.cmd b/MPE/spark/bin/load-spark-env.cmd
new file mode 100644
index 0000000..0977025
--- /dev/null
+++ b/MPE/spark/bin/load-spark-env.cmd
@@ -0,0 +1,59 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem This script loads spark-env.cmd if it exists, and ensures it is only loaded once.
+rem spark-env.cmd is loaded from SPARK_CONF_DIR if set, or within the current directory's
+rem conf/ subdirectory.
+
+if [%SPARK_ENV_LOADED%] == [] (
+ set SPARK_ENV_LOADED=1
+
+ if not [%SPARK_CONF_DIR%] == [] (
+ set user_conf_dir=%SPARK_CONF_DIR%
+ ) else (
+ set user_conf_dir=..\conf
+ )
+
+ call :LoadSparkEnv
+)
+
+rem Setting SPARK_SCALA_VERSION if not already set.
+
+set ASSEMBLY_DIR2="%SPARK_HOME%\assembly\target\scala-2.11"
+set ASSEMBLY_DIR1="%SPARK_HOME%\assembly\target\scala-2.10"
+
+if [%SPARK_SCALA_VERSION%] == [] (
+
+ if exist %ASSEMBLY_DIR2% if exist %ASSEMBLY_DIR1% (
+ echo "Presence of build for both scala versions(SCALA 2.10 and SCALA 2.11) detected."
+ echo "Either clean one of them or, set SPARK_SCALA_VERSION=2.11 in spark-env.cmd."
+ exit 1
+ )
+ if exist %ASSEMBLY_DIR2% (
+ set SPARK_SCALA_VERSION=2.11
+ ) else (
+ set SPARK_SCALA_VERSION=2.10
+ )
+)
+exit /b 0
+
+:LoadSparkEnv
+if exist "%user_conf_dir%\spark-env.cmd" (
+ call "%user_conf_dir%\spark-env.cmd"
+)
diff --git a/MPE/spark/bin/load-spark-env.sh b/MPE/spark/bin/load-spark-env.sh
new file mode 100644
index 0000000..8a2f709
--- /dev/null
+++ b/MPE/spark/bin/load-spark-env.sh
@@ -0,0 +1,63 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This script loads spark-env.sh if it exists, and ensures it is only loaded once.
+# spark-env.sh is loaded from SPARK_CONF_DIR if set, or within the current directory's
+# conf/ subdirectory.
+
+# Figure out where Spark is installed
+if [ -z "${SPARK_HOME}" ]; then
+ source "$(dirname "$0")"/find-spark-home
+fi
+
+if [ -z "$SPARK_ENV_LOADED" ]; then
+ export SPARK_ENV_LOADED=1
+
+ # Returns the parent of the directory this script lives in.
+ parent_dir="${SPARK_HOME}"
+
+ user_conf_dir="${SPARK_CONF_DIR:-"$parent_dir"/conf}"
+
+ if [ -f "${user_conf_dir}/spark-env.sh" ]; then
+ # Promote all variable declarations to environment (exported) variables
+ set -a
+ . "${user_conf_dir}/spark-env.sh"
+ set +a
+ fi
+fi
+
+# Setting SPARK_SCALA_VERSION if not already set.
+
+if [ -z "$SPARK_SCALA_VERSION" ]; then
+
+ ASSEMBLY_DIR2="${SPARK_HOME}/assembly/target/scala-2.11"
+ ASSEMBLY_DIR1="${SPARK_HOME}/assembly/target/scala-2.10"
+
+ if [[ -d "$ASSEMBLY_DIR2" && -d "$ASSEMBLY_DIR1" ]]; then
+ echo -e "Presence of build for both scala versions(SCALA 2.10 and SCALA 2.11) detected." 1>&2
+ echo -e 'Either clean one of them or, export SPARK_SCALA_VERSION=2.11 in spark-env.sh.' 1>&2
+ exit 1
+ fi
+
+ if [ -d "$ASSEMBLY_DIR2" ]; then
+ export SPARK_SCALA_VERSION="2.11"
+ else
+ export SPARK_SCALA_VERSION="2.10"
+ fi
+fi
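
A minimal spark-env.sh sketch illustrating the "set -a" block above: plain assignments in the sourced file come back as exported environment variables (the values and paths are illustrative, not taken from this repo):

    # conf/spark-env.sh
    JAVA_HOME=/usr/lib/jvm/java-8-openjdk   # assumed path
    SPARK_WORKER_MEMORY=4g
    SPARK_SCALA_VERSION=2.11                # also silences the dual-assembly check above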
diff --git a/MPE/spark/bin/pyspark b/MPE/spark/bin/pyspark
new file mode 100644
index 0000000..95ab628
--- /dev/null
+++ b/MPE/spark/bin/pyspark
@@ -0,0 +1,77 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+if [ -z "${SPARK_HOME}" ]; then
+ source "$(dirname "$0")"/find-spark-home
+fi
+
+source "${SPARK_HOME}"/bin/load-spark-env.sh
+export _SPARK_CMD_USAGE="Usage: ./bin/pyspark [options]"
+
+# In Spark 2.0, IPYTHON and IPYTHON_OPTS are removed and pyspark fails to launch if either option
+# is set in the user's environment. Instead, users should set PYSPARK_DRIVER_PYTHON=ipython
+# to use IPython and set PYSPARK_DRIVER_PYTHON_OPTS to pass options when starting the Python driver
+# (e.g. PYSPARK_DRIVER_PYTHON_OPTS='notebook'). This supports full customization of the IPython
+# and executor Python executables.
+
+# Fail noisily if removed options are set
+if [[ -n "$IPYTHON" || -n "$IPYTHON_OPTS" ]]; then
+ echo "Error in pyspark startup:"
+ echo "IPYTHON and IPYTHON_OPTS are removed in Spark 2.0+. Remove these from the environment and set PYSPARK_DRIVER_PYTHON and PYSPARK_DRIVER_PYTHON_OPTS instead."
+ exit 1
+fi
+
+# Default to standard python interpreter unless told otherwise
+if [[ -z "$PYSPARK_DRIVER_PYTHON" ]]; then
+ PYSPARK_DRIVER_PYTHON="${PYSPARK_PYTHON:-"python"}"
+fi
+
+WORKS_WITH_IPYTHON=$(python -c 'import sys; print(sys.version_info >= (2, 7, 0))')
+
+# Determine the Python executable to use for the executors:
+if [[ -z "$PYSPARK_PYTHON" ]]; then
+ if [[ $PYSPARK_DRIVER_PYTHON == *ipython* && ! $WORKS_WITH_IPYTHON ]]; then
+ echo "IPython requires Python 2.7+; please install python2.7 or set PYSPARK_PYTHON" 1>&2
+ exit 1
+ else
+ PYSPARK_PYTHON=python
+ fi
+fi
+export PYSPARK_PYTHON
+
+# Add the PySpark classes to the Python path:
+export PYTHONPATH="${SPARK_HOME}/python/:$PYTHONPATH"
+export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.10.7-src.zip:$PYTHONPATH"
+
+# Load the PySpark shell.py script when ./pyspark is used interactively:
+export OLD_PYTHONSTARTUP="$PYTHONSTARTUP"
+export PYTHONSTARTUP="${SPARK_HOME}/python/pyspark/shell.py"
+
+# For pyspark tests
+if [[ -n "$SPARK_TESTING" ]]; then
+ unset YARN_CONF_DIR
+ unset HADOOP_CONF_DIR
+ export PYTHONHASHSEED=0
+ exec "$PYSPARK_DRIVER_PYTHON" -m "$1"
+ exit
+fi
+
+export PYSPARK_DRIVER_PYTHON
+export PYSPARK_DRIVER_PYTHON_OPTS
+exec "${SPARK_HOME}"/bin/spark-submit pyspark-shell-main --name "PySparkShell" "$@"
diff --git a/MPE/spark/bin/pyspark.cmd b/MPE/spark/bin/pyspark.cmd
new file mode 100644
index 0000000..72d046a
--- /dev/null
+++ b/MPE/spark/bin/pyspark.cmd
@@ -0,0 +1,23 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem This is the entry point for running PySpark. To avoid polluting the
+rem environment, it just launches a new cmd to do the real work.
+
+cmd /V /E /C "%~dp0pyspark2.cmd" %*
diff --git a/MPE/spark/bin/pyspark2.cmd b/MPE/spark/bin/pyspark2.cmd
new file mode 100644
index 0000000..15fa910
--- /dev/null
+++ b/MPE/spark/bin/pyspark2.cmd
@@ -0,0 +1,38 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem Figure out where the Spark framework is installed
+call "%~dp0find-spark-home.cmd"
+
+call "%SPARK_HOME%\bin\load-spark-env.cmd"
+set _SPARK_CMD_USAGE=Usage: bin\pyspark.cmd [options]
+
+rem Figure out which Python to use.
+if "x%PYSPARK_DRIVER_PYTHON%"=="x" (
+ set PYSPARK_DRIVER_PYTHON=python
+ if not [%PYSPARK_PYTHON%] == [] set PYSPARK_DRIVER_PYTHON=%PYSPARK_PYTHON%
+)
+
+set PYTHONPATH=%SPARK_HOME%\python;%PYTHONPATH%
+set PYTHONPATH=%SPARK_HOME%\python\lib\py4j-0.10.7-src.zip;%PYTHONPATH%
+
+set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
+set PYTHONSTARTUP=%SPARK_HOME%\python\pyspark\shell.py
+
+call "%SPARK_HOME%\bin\spark-submit2.cmd" pyspark-shell-main --name "PySparkShell" %*
diff --git a/MPE/spark/bin/run-example b/MPE/spark/bin/run-example
new file mode 100644
index 0000000..4ba5399
--- /dev/null
+++ b/MPE/spark/bin/run-example
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+if [ -z "${SPARK_HOME}" ]; then
+ source "$(dirname "$0")"/find-spark-home
+fi
+
+export _SPARK_CMD_USAGE="Usage: ./bin/run-example [options] example-class [example args]"
+exec "${SPARK_HOME}"/bin/spark-submit run-example "$@"
diff --git a/MPE/spark/bin/run-example.cmd b/MPE/spark/bin/run-example.cmd
new file mode 100644
index 0000000..7cfaa7e
--- /dev/null
+++ b/MPE/spark/bin/run-example.cmd
@@ -0,0 +1,24 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem Figure out where the Spark framework is installed
+call "%~dp0find-spark-home.cmd"
+
+set _SPARK_CMD_USAGE=Usage: ./bin/run-example [options] example-class [example args]
+cmd /V /E /C "%~dp0spark-submit.cmd" run-example %*
diff --git a/MPE/spark/bin/spark-class b/MPE/spark/bin/spark-class
new file mode 100644
index 0000000..65d3b96
--- /dev/null
+++ b/MPE/spark/bin/spark-class
@@ -0,0 +1,99 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+if [ -z "${SPARK_HOME}" ]; then
+ source "$(dirname "$0")"/find-spark-home
+fi
+
+. "${SPARK_HOME}"/bin/load-spark-env.sh
+
+# Find the java binary
+if [ -n "${JAVA_HOME}" ]; then
+ RUNNER="${JAVA_HOME}/bin/java"
+else
+ if [ "$(command -v java)" ]; then
+ RUNNER="java"
+ else
+ echo "JAVA_HOME is not set" >&2
+ exit 1
+ fi
+fi
+
+# Find Spark jars.
+if [ -d "${SPARK_HOME}/jars" ]; then
+ SPARK_JARS_DIR="${SPARK_HOME}/jars"
+else
+ SPARK_JARS_DIR="${SPARK_HOME}/assembly/target/scala-$SPARK_SCALA_VERSION/jars"
+fi
+
+if [ ! -d "$SPARK_JARS_DIR" ] && [ -z "$SPARK_TESTING$SPARK_SQL_TESTING" ]; then
+ echo "Failed to find Spark jars directory ($SPARK_JARS_DIR)." 1>&2
+ echo "You need to build Spark with the target \"package\" before running this program." 1>&2
+ exit 1
+else
+ LAUNCH_CLASSPATH="$SPARK_JARS_DIR/*"
+fi
+
+# Add the launcher build dir to the classpath if requested.
+if [ -n "$SPARK_PREPEND_CLASSES" ]; then
+ LAUNCH_CLASSPATH="${SPARK_HOME}/launcher/target/scala-$SPARK_SCALA_VERSION/classes:$LAUNCH_CLASSPATH"
+fi
+
+# For tests
+if [[ -n "$SPARK_TESTING" ]]; then
+ unset YARN_CONF_DIR
+ unset HADOOP_CONF_DIR
+fi
+
+# The launcher library will print arguments separated by a NULL character, to allow arguments with
+# characters that would be otherwise interpreted by the shell. Read that in a while loop, populating
+# an array that will be used to exec the final command.
+#
+# The exit code of the launcher is appended to the output, so the parent shell removes it from the
+# command array and checks the value to see if the launcher succeeded.
+build_command() {
+ "$RUNNER" -Xmx128m -cp "$LAUNCH_CLASSPATH" org.apache.spark.launcher.Main "$@"
+ printf "%d\0" $?
+}
+
+# Turn off posix mode since it does not allow process substitution
+set +o posix
+CMD=()
+while IFS= read -d '' -r ARG; do
+ CMD+=("$ARG")
+done < <(build_command "$@")
+
+COUNT=${#CMD[@]}
+LAST=$((COUNT - 1))
+LAUNCHER_EXIT_CODE=${CMD[$LAST]}
+
+# Certain JVM failures result in errors being printed to stdout (instead of stderr), which causes
+# the code that parses the output of the launcher to get confused. In those cases, check if the
+# exit code is an integer, and if it's not, handle it as a special error case.
+if ! [[ $LAUNCHER_EXIT_CODE =~ ^[0-9]+$ ]]; then
+ echo "${CMD[@]}" | head -n-1 1>&2
+ exit 1
+fi
+
+if [ $LAUNCHER_EXIT_CODE != 0 ]; then
+ exit $LAUNCHER_EXIT_CODE
+fi
+
+CMD=("${CMD[@]:0:$LAST}")
+exec "${CMD[@]}"
diff --git a/MPE/spark/bin/spark-class.cmd b/MPE/spark/bin/spark-class.cmd
new file mode 100644
index 0000000..3bf3d20
--- /dev/null
+++ b/MPE/spark/bin/spark-class.cmd
@@ -0,0 +1,23 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem This is the entry point for running a Spark class. To avoid polluting
+rem the environment, it just launches a new cmd to do the real work.
+
+cmd /V /E /C "%~dp0spark-class2.cmd" %*
diff --git a/MPE/spark/bin/spark-class2.cmd b/MPE/spark/bin/spark-class2.cmd
new file mode 100644
index 0000000..5da7d7a
--- /dev/null
+++ b/MPE/spark/bin/spark-class2.cmd
@@ -0,0 +1,72 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem Figure out where the Spark framework is installed
+call "%~dp0find-spark-home.cmd"
+
+call "%SPARK_HOME%\bin\load-spark-env.cmd"
+
+rem Test that an argument was given
+if "x%1"=="x" (
+ echo Usage: spark-class ^<class^> [^<args^>]
+ exit /b 1
+)
+
+rem Find Spark jars.
+if exist "%SPARK_HOME%\jars" (
+ set SPARK_JARS_DIR="%SPARK_HOME%\jars"
+) else (
+ set SPARK_JARS_DIR="%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%\jars"
+)
+
+if not exist "%SPARK_JARS_DIR%"\ (
+ echo Failed to find Spark jars directory.
+ echo You need to build Spark before running this program.
+ exit /b 1
+)
+
+set LAUNCH_CLASSPATH=%SPARK_JARS_DIR%\*
+
+rem Add the launcher build dir to the classpath if requested.
+if not "x%SPARK_PREPEND_CLASSES%"=="x" (
+ set LAUNCH_CLASSPATH="%SPARK_HOME%\launcher\target\scala-%SPARK_SCALA_VERSION%\classes;%LAUNCH_CLASSPATH%"
+)
+
+rem Figure out where java is.
+set RUNNER=java
+if not "x%JAVA_HOME%"=="x" (
+ set RUNNER=%JAVA_HOME%\bin\java
+) else (
+ where /q "%RUNNER%"
+ if ERRORLEVEL 1 (
+ echo Java not found and JAVA_HOME environment variable is not set.
+ echo Install Java and set JAVA_HOME to point to the Java installation directory.
+ exit /b 1
+ )
+)
+
+rem The launcher library prints the command to be executed in a single line suitable for being
+rem executed by the batch interpreter. So read all the output of the launcher into a variable.
+set LAUNCHER_OUTPUT=%temp%\spark-class-launcher-output-%RANDOM%.txt
+"%RUNNER%" -Xmx128m -cp "%LAUNCH_CLASSPATH%" org.apache.spark.launcher.Main %* > %LAUNCHER_OUTPUT%
+for /f "tokens=*" %%i in (%LAUNCHER_OUTPUT%) do (
+ set SPARK_CMD=%%i
+)
+del %LAUNCHER_OUTPUT%
+%SPARK_CMD%
diff --git a/MPE/spark/bin/spark-shell b/MPE/spark/bin/spark-shell
new file mode 100644
index 0000000..421f36c
--- /dev/null
+++ b/MPE/spark/bin/spark-shell
@@ -0,0 +1,95 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Shell script for starting the Spark Shell REPL
+
+cygwin=false
+case "$(uname)" in
+ CYGWIN*) cygwin=true;;
+esac
+
+# Enter posix mode for bash
+set -o posix
+
+if [ -z "${SPARK_HOME}" ]; then
+ source "$(dirname "$0")"/find-spark-home
+fi
+
+export _SPARK_CMD_USAGE="Usage: ./bin/spark-shell [options]"
+
+# SPARK-4161: scala does not assume use of the java classpath,
+# so we need to add the "-Dscala.usejavacp=true" flag manually. We
+# do this specifically for the Spark shell because the scala REPL
+# has its own class loader, and any additional classpath specified
+# through spark.driver.extraClassPath is not automatically propagated.
+SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS -Dscala.usejavacp=true"
+
+function main() {
+ if $cygwin; then
+ # Workaround for issue involving JLine and Cygwin
+ # (see http://sourceforge.net/p/jline/bugs/40/).
+ # If you're using the Mintty terminal emulator in Cygwin, you may need to set the
+ # "Backspace sends ^H" setting in "Keys" section of the Mintty options
+ # (see https://github.com/sbt/sbt/issues/562).
+ stty -icanon min 1 -echo > /dev/null 2>&1
+ export SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS -Djline.terminal=unix"
+ "${SPARK_HOME}"/bin/spark-submit --class org.apache.spark.repl.Main --name "Spark shell" "$@"
+ stty icanon echo > /dev/null 2>&1
+ else
+ export SPARK_SUBMIT_OPTS
+ "${SPARK_HOME}"/bin/spark-submit --class org.apache.spark.repl.Main --name "Spark shell" "$@"
+ fi
+}
+
+# Copy restore-TTY-on-exit functions from Scala script so spark-shell exits properly even in
+# binary distribution of Spark where Scala is not installed
+exit_status=127
+saved_stty=""
+
+# restore stty settings (echo in particular)
+function restoreSttySettings() {
+ stty $saved_stty
+ saved_stty=""
+}
+
+function onExit() {
+ if [[ "$saved_stty" != "" ]]; then
+ restoreSttySettings
+ fi
+ exit $exit_status
+}
+
+# to reenable echo if we are interrupted before completing.
+trap onExit INT
+
+# save terminal settings
+saved_stty=$(stty -g 2>/dev/null)
+# clear on error so we don't later try to restore them
+if [[ ! $? ]]; then
+ saved_stty=""
+fi
+
+main "$@"
+
+# record the exit status lest it be overwritten:
+# then reenable echo and propagate the code.
+exit_status=$?
+onExit
+
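Since the script appends to SPARK_SUBMIT_OPTS rather than replacing it, callers can layer their own driver JVM flags on top of the forced -Dscala.usejavacp=true. An illustrative invocation (the GC flag and master URL are assumptions, not repo settings):

    SPARK_SUBMIT_OPTS="-XX:+UseG1GC" ./bin/spark-shell --master local[4]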
diff --git a/MPE/spark/bin/spark-shell.cmd b/MPE/spark/bin/spark-shell.cmd
new file mode 100644
index 0000000..991423d
--- /dev/null
+++ b/MPE/spark/bin/spark-shell.cmd
@@ -0,0 +1,23 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem This is the entry point for running Spark shell. To avoid polluting the
+rem environment, it just launches a new cmd to do the real work.
+
+cmd /V /E /C "%~dp0spark-shell2.cmd" %*
diff --git a/MPE/spark/bin/spark-shell2.cmd b/MPE/spark/bin/spark-shell2.cmd
new file mode 100644
index 0000000..aaf7190
--- /dev/null
+++ b/MPE/spark/bin/spark-shell2.cmd
@@ -0,0 +1,37 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem Figure out where the Spark framework is installed
+call "%~dp0find-spark-home.cmd"
+
+set _SPARK_CMD_USAGE=Usage: .\bin\spark-shell.cmd [options]
+
+rem SPARK-4161: scala does not assume use of the java classpath,
+rem so we need to add the "-Dscala.usejavacp=true" flag manually. We
+rem do this specifically for the Spark shell because the scala REPL
+rem has its own class loader, and any additional classpath specified
+rem through spark.driver.extraClassPath is not automatically propagated.
+if "x%SPARK_SUBMIT_OPTS%"=="x" (
+ set SPARK_SUBMIT_OPTS=-Dscala.usejavacp=true
+ goto run_shell
+)
+set SPARK_SUBMIT_OPTS="%SPARK_SUBMIT_OPTS% -Dscala.usejavacp=true"
+
+:run_shell
+"%SPARK_HOME%\bin\spark-submit2.cmd" --class org.apache.spark.repl.Main --name "Spark shell" %*
diff --git a/MPE/spark/bin/spark-sql b/MPE/spark/bin/spark-sql
new file mode 100644
index 0000000..b08b944
--- /dev/null
+++ b/MPE/spark/bin/spark-sql
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+if [ -z "${SPARK_HOME}" ]; then
+ source "$(dirname "$0")"/find-spark-home
+fi
+
+export _SPARK_CMD_USAGE="Usage: ./bin/spark-sql [options] [cli option]"
+exec "${SPARK_HOME}"/bin/spark-submit --class org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver "$@"
diff --git a/MPE/spark/bin/spark-submit b/MPE/spark/bin/spark-submit
new file mode 100644
index 0000000..4e9d361
--- /dev/null
+++ b/MPE/spark/bin/spark-submit
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+if [ -z "${SPARK_HOME}" ]; then
+ source "$(dirname "$0")"/find-spark-home
+fi
+
+# disable randomized hash for strings in Python 3.3+
+export PYTHONHASHSEED=0
+
+exec "${SPARK_HOME}"/bin/spark-class org.apache.spark.deploy.SparkSubmit "$@"
diff --git a/MPE/spark/bin/spark-submit.cmd b/MPE/spark/bin/spark-submit.cmd
new file mode 100644
index 0000000..f301606
--- /dev/null
+++ b/MPE/spark/bin/spark-submit.cmd
@@ -0,0 +1,23 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem This is the entry point for running Spark submit. To avoid polluting the
+rem environment, it just launches a new cmd to do the real work.
+
+cmd /V /E /C "%~dp0spark-submit2.cmd" %*
diff --git a/MPE/spark/bin/spark-submit2.cmd b/MPE/spark/bin/spark-submit2.cmd
new file mode 100644
index 0000000..49e350f
--- /dev/null
+++ b/MPE/spark/bin/spark-submit2.cmd
@@ -0,0 +1,27 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem This script does the real work of running Spark submit; spark-submit.cmd
+rem launches it in a fresh cmd so the caller's environment is not polluted.
+
+rem disable randomized hash for strings in Python 3.3+
+set PYTHONHASHSEED=0
+
+set CLASS=org.apache.spark.deploy.SparkSubmit
+"%~dp0spark-class2.cmd" %CLASS% %*
diff --git a/MPE/spark/bin/sparkR b/MPE/spark/bin/sparkR
new file mode 100644
index 0000000..29ab10d
--- /dev/null
+++ b/MPE/spark/bin/sparkR
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+if [ -z "${SPARK_HOME}" ]; then
+ source "$(dirname "$0")"/find-spark-home
+fi
+
+source "${SPARK_HOME}"/bin/load-spark-env.sh
+export _SPARK_CMD_USAGE="Usage: ./bin/sparkR [options]"
+exec "${SPARK_HOME}"/bin/spark-submit sparkr-shell-main "$@"
diff --git a/MPE/spark/bin/sparkR.cmd b/MPE/spark/bin/sparkR.cmd
new file mode 100644
index 0000000..1e5ea6a
--- /dev/null
+++ b/MPE/spark/bin/sparkR.cmd
@@ -0,0 +1,23 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem This is the entry point for running SparkR. To avoid polluting the
+rem environment, it just launches a new cmd to do the real work.
+
+cmd /V /E /C "%~dp0sparkR2.cmd" %*
diff --git a/MPE/spark/bin/sparkR2.cmd b/MPE/spark/bin/sparkR2.cmd
new file mode 100644
index 0000000..b48bea3
--- /dev/null
+++ b/MPE/spark/bin/sparkR2.cmd
@@ -0,0 +1,26 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem Figure out where the Spark framework is installed
+call "%~dp0find-spark-home.cmd"
+
+call "%SPARK_HOME%\bin\load-spark-env.cmd"
+
+
+call "%SPARK_HOME%\bin\spark-submit2.cmd" sparkr-shell-main %*
diff --git a/MPE/spark/conf/computer.lnk b/MPE/spark/conf/computer.lnk
new file mode 100644
index 0000000..82d545a
--- /dev/null
+++ b/MPE/spark/conf/computer.lnk
Binary files differ
diff --git a/MPE/spark/conf/docker.properties.template b/MPE/spark/conf/docker.properties.template
new file mode 100644
index 0000000..2ecb4f1
--- /dev/null
+++ b/MPE/spark/conf/docker.properties.template
@@ -0,0 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+spark.mesos.executor.docker.image: <image built from `../external/docker/spark-mesos/Dockerfile`>
+spark.mesos.executor.docker.volumes: /usr/local/lib:/host/usr/local/lib:ro
+spark.mesos.executor.home: /opt/spark
diff --git a/MPE/spark/conf/fairscheduler.xml.template b/MPE/spark/conf/fairscheduler.xml.template
new file mode 100644
index 0000000..385b2e7
--- /dev/null
+++ b/MPE/spark/conf/fairscheduler.xml.template
@@ -0,0 +1,31 @@
+<?xml version="1.0"?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<allocations>
+ <pool name="production">
+ <schedulingMode>FAIR</schedulingMode>
+ <weight>1</weight>
+ <minShare>2</minShare>
+ </pool>
+ <pool name="test">
+ <schedulingMode>FIFO</schedulingMode>
+ <weight>2</weight>
+ <minShare>3</minShare>
+ </pool>
+</allocations>
diff --git a/MPE/spark/conf/log4j-task.properties b/MPE/spark/conf/log4j-task.properties
new file mode 100644
index 0000000..75bca58
--- /dev/null
+++ b/MPE/spark/conf/log4j-task.properties
@@ -0,0 +1,22 @@
+######################### logger ##############################
+log4j.logger.org.apache.http=OFF
+log4j.logger.org.apache.http.wire=OFF
+
+#Log4j
+log4j.rootLogger=error,console,file
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.Threshold=warn
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=[%d{yyyy-MM-dd HH:mm:ssZ}] [%-5p] [Thread\:%t] %l %x - <%m>%n
+
+
+log4j.appender.file=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.file.Threshold=warn
+log4j.appender.file.encoding=UTF-8
+log4j.appender.file.Append=true
+
+log4j.appender.file.file=/data/tsg/olap/spark-2.2.3-bin-hadoop2.7/logs/ip-learning-application.log
+log4j.appender.file.DatePattern='.'yyyy-MM-dd
+log4j.appender.file.layout=org.apache.log4j.PatternLayout
+log4j.appender.file.layout.ConversionPattern=[%d{yyyy-MM-dd HH:mm:ssZ}] [%-5p] %X{ip} [Thread\:%t] %l %x - %m%n
diff --git a/MPE/spark/conf/log4j.properties b/MPE/spark/conf/log4j.properties
new file mode 100644
index 0000000..0fb4737
--- /dev/null
+++ b/MPE/spark/conf/log4j.properties
@@ -0,0 +1,40 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the console
+log4j.rootCategory=WARN, console
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=[%d{yyyy-MM-dd HH:mm:ssZ}] %p %c{1}: %m%n
+
+# Set the default spark-shell log level to WARN. When running the spark-shell, the
+# log level for this class is used to overwrite the root logger's log level, so that
+# the user can have different defaults for the shell and regular Spark apps.
+log4j.logger.org.apache.spark.repl.Main=WARN
+
+# Settings to quiet third party logs that are too verbose
+log4j.logger.org.spark_project.jetty=WARN
+log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR
+log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
+log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
+log4j.logger.org.apache.parquet=ERROR
+log4j.logger.parquet=ERROR
+
+# SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support
+log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
+log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR
diff --git a/MPE/spark/conf/log4j.properties.template b/MPE/spark/conf/log4j.properties.template
new file mode 100644
index 0000000..ec1aa18
--- /dev/null
+++ b/MPE/spark/conf/log4j.properties.template
@@ -0,0 +1,40 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the console
+log4j.rootCategory=INFO, console
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+
+# Set the default spark-shell log level to WARN. When running the spark-shell, the
+# log level for this class is used to overwrite the root logger's log level, so that
+# the user can have different defaults for the shell and regular Spark apps.
+log4j.logger.org.apache.spark.repl.Main=WARN
+
+# Settings to quiet third party logs that are too verbose
+log4j.logger.org.spark_project.jetty=WARN
+log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR
+log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
+log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
+log4j.logger.org.apache.parquet=ERROR
+log4j.logger.parquet=ERROR
+
+# SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support
+log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
+log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR
diff --git a/MPE/spark/conf/metrics.properties.template b/MPE/spark/conf/metrics.properties.template
new file mode 100644
index 0000000..aeb76c9
--- /dev/null
+++ b/MPE/spark/conf/metrics.properties.template
@@ -0,0 +1,170 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# syntax: [instance].sink|source.[name].[options]=[value]
+
+# This file configures Spark's internal metrics system. The metrics system is
+# divided into instances which correspond to internal components.
+# Each instance can be configured to report its metrics to one or more sinks.
+# Accepted values for [instance] are "master", "worker", "executor", "driver",
+# and "applications". A wildcard "*" can be used as an instance name, in
+# which case all instances will inherit the supplied property.
+#
+# Within an instance, a "source" specifies a particular set of grouped metrics.
+# There are two kinds of sources:
+# 1. Spark internal sources, like MasterSource, WorkerSource, etc, which will
+# collect a Spark component's internal state. Each instance is paired with a
+# Spark source that is added automatically.
+# 2. Common sources, like JvmSource, which will collect low level state.
+# These can be added through configuration options and are then loaded
+# using reflection.
+#
+# A "sink" specifies where metrics are delivered to. Each instance can be
+# assigned one or more sinks.
+#
+# The sink|source field specifies whether the property relates to a sink or
+# source.
+#
+# The [name] field specifies the name of source or sink.
+#
+# The [options] field is the specific property of this source or sink. The
+# source or sink is responsible for parsing this property.
+#
+# Notes:
+# 1. To add a new sink, set the "class" option to a fully qualified class
+# name (see examples below).
+# 2. Some sinks involve a polling period. The minimum allowed polling period
+# is 1 second.
+# 3. Wildcard properties can be overridden by more specific properties.
+# For example, master.sink.console.period takes precedence over
+# *.sink.console.period.
+# 4. A metrics specific configuration
+# "spark.metrics.conf=${SPARK_HOME}/conf/metrics.properties" should be
+# added to Java properties using -Dspark.metrics.conf=xxx if you want to
+# customize the metrics system. You can also put the file in ${SPARK_HOME}/conf
+# and it will be loaded automatically.
+# 5. The MetricsServlet sink is added by default as a sink in the master,
+# worker and driver, and you can send HTTP requests to the "/metrics/json"
+# endpoint to get a snapshot of all the registered metrics in JSON format.
+# For master, requests to the "/metrics/master/json" and
+# "/metrics/applications/json" endpoints can be sent separately to get
+# metrics snapshots of the master instance and applications. This
+# MetricsServlet does not have to be configured.
+
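+# A minimal sketch tying the grammar above together (the Graphite host and
+# port are placeholders for illustration, not values from this deployment):
+#
+#   *.sink.graphite.class=org.apache.spark.metrics.sink.GraphiteSink
+#   *.sink.graphite.host=graphite.example.com
+#   *.sink.graphite.port=2003
+#   driver.source.jvm.class=org.apache.spark.metrics.source.JvmSource
+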
+## List of available common sources and their properties.
+
+# org.apache.spark.metrics.source.JvmSource
+# Note: Currently, JvmSource is the only available common source.
+# It can be added to an instance by setting the "class" option to its
+# fully qualified class name (see examples below).
+
+## List of available sinks and their properties.
+
+# org.apache.spark.metrics.sink.ConsoleSink
+# Name: Default: Description:
+# period 10 Poll period
+# unit seconds Unit of the poll period
+
+# org.apache.spark.metrics.sink.CSVSink
+# Name: Default: Description:
+# period 10 Poll period
+# unit seconds Unit of the poll period
+# directory /tmp Where to store CSV files
+
+# org.apache.spark.metrics.sink.GangliaSink
+# Name: Default: Description:
+# host NONE Hostname or multicast group of the Ganglia server,
+# must be set
+# port NONE Port of the Ganglia server(s), must be set
+# period 10 Poll period
+# unit seconds Unit of the poll period
+# ttl 1 TTL of messages sent by Ganglia
+# dmax 0 Lifetime in seconds of metrics (0 never expired)
+# mode multicast Ganglia network mode ('unicast' or 'multicast')
+
+# org.apache.spark.metrics.sink.JmxSink
+
+# org.apache.spark.metrics.sink.MetricsServlet
+# Name: Default: Description:
+# path VARIES* Path prefix from the web server root
+# sample false Whether to show entire set of samples for histograms
+# ('false' or 'true')
+#
+# * Default path is /metrics/json for all instances except the master. The
+# master has two paths:
+# /metrics/applications/json # App information
+# /metrics/master/json # Master information
+
+# org.apache.spark.metrics.sink.GraphiteSink
+# Name: Default: Description:
+# host NONE Hostname of the Graphite server, must be set
+# port NONE Port of the Graphite server, must be set
+# period 10 Poll period
+# unit seconds Unit of the poll period
+# prefix EMPTY STRING Prefix to prepend to every metric's name
+# protocol tcp Protocol ("tcp" or "udp") to use
+
+## Examples
+# Enable JmxSink for all instances by class name
+#*.sink.jmx.class=org.apache.spark.metrics.sink.JmxSink
+
+# Enable ConsoleSink for all instances by class name
+#*.sink.console.class=org.apache.spark.metrics.sink.ConsoleSink
+
+# Polling period for the ConsoleSink
+#*.sink.console.period=10
+# Unit of the polling period for the ConsoleSink
+#*.sink.console.unit=seconds
+
+# Polling period for the ConsoleSink specific for the master instance
+#master.sink.console.period=15
+# Unit of the polling period for the ConsoleSink specific for the master
+# instance
+#master.sink.console.unit=seconds
+
+# Enable CsvSink for all instances by class name
+#*.sink.csv.class=org.apache.spark.metrics.sink.CsvSink
+
+# Polling period for the CsvSink
+#*.sink.csv.period=1
+# Unit of the polling period for the CsvSink
+#*.sink.csv.unit=minutes
+
+# Polling directory for CsvSink
+#*.sink.csv.directory=/tmp/
+
+# Polling period for the CsvSink specific for the worker instance
+#worker.sink.csv.period=10
+# Unit of the polling period for the CsvSink specific for the worker instance
+#worker.sink.csv.unit=minutes
+
+# Enable Slf4jSink for all instances by class name
+#*.sink.slf4j.class=org.apache.spark.metrics.sink.Slf4jSink
+
+# Polling period for the Slf4JSink
+#*.sink.slf4j.period=1
+# Unit of the polling period for the Slf4jSink
+#*.sink.slf4j.unit=minutes
+
+# Enable JvmSource for instance master, worker, driver and executor
+#master.source.jvm.class=org.apache.spark.metrics.source.JvmSource
+
+#worker.source.jvm.class=org.apache.spark.metrics.source.JvmSource
+
+#driver.source.jvm.class=org.apache.spark.metrics.source.JvmSource
+
+#executor.source.jvm.class=org.apache.spark.metrics.source.JvmSource
diff --git a/MPE/spark/conf/slaves b/MPE/spark/conf/slaves
new file mode 100644
index 0000000..3db059e
--- /dev/null
+++ b/MPE/spark/conf/slaves
@@ -0,0 +1,3 @@
+192.168.20.223
+192.168.20.224
+192.168.20.225
diff --git a/MPE/spark/conf/slaves.template b/MPE/spark/conf/slaves.template
new file mode 100644
index 0000000..be42a63
--- /dev/null
+++ b/MPE/spark/conf/slaves.template
@@ -0,0 +1,19 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# A Spark Worker will be started on each of the machines listed below.
+localhost
\ No newline at end of file
diff --git a/MPE/spark/conf/spark-defaults.conf b/MPE/spark/conf/spark-defaults.conf
new file mode 100644
index 0000000..77218e0
--- /dev/null
+++ b/MPE/spark/conf/spark-defaults.conf
@@ -0,0 +1,39 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Default system properties included when running spark-submit.
+# This is useful for setting default environmental settings.
+
+# Example:
+# spark.master spark://master:7077
+# spark.eventLog.enabled true
+# spark.eventLog.dir hdfs://namenode:8021/directory
+# spark.serializer org.apache.spark.serializer.KryoSerializer
+# spark.driver.memory 5g
+# spark.executor.extraJavaOptions -XX:+PrintGCDetails -Dkey=value -Dnumbers="one two three"
+#
+
+spark.eventLog.enabled true
+spark.local.dir /data/tsg/olap/spark-2.2.3-bin-hadoop2.7/tmp/spark-events
+spark.history.fs.logDirectory file:/data/tsg/olap/spark-2.2.3-bin-hadoop2.7/tmp/spark-events
+spark.eventLog.dir file:/data/tsg/olap/spark-2.2.3-bin-hadoop2.7/tmp/spark-events
+spark.history.fs.cleaner.enabled true
+spark.history.fs.cleaner.interval 1d
+spark.history.fs.cleaner.maxAge 7d
+spark.executor.logs.rolling.strategy time
+spark.executor.logs.rolling.time.interval daily
+spark.executor.logs.rolling.maxRetainedFiles 10
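+
+# How the settings above fit together (a sketch; the paths are the ones
+# configured here): the driver writes event logs into spark.eventLog.dir,
+# and the history server replays them from spark.history.fs.logDirectory:
+#   mkdir -p /data/tsg/olap/spark-2.2.3-bin-hadoop2.7/tmp/spark-events
+#   ./sbin/start-history-server.sh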
diff --git a/MPE/spark/conf/spark-defaults.conf.template b/MPE/spark/conf/spark-defaults.conf.template
new file mode 100644
index 0000000..f18b450
--- /dev/null
+++ b/MPE/spark/conf/spark-defaults.conf.template
@@ -0,0 +1,29 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Default system properties included when running spark-submit.
+# This is useful for setting default environmental settings.
+
+# Example:
+# spark.master spark://master:7077
+# spark.eventLog.enabled true
+# spark.eventLog.dir hdfs://namenode:8021/directory
+# spark.serializer org.apache.spark.serializer.KryoSerializer
+# spark.driver.memory 5g
+# spark.executor.extraJavaOptions -XX:+PrintGCDetails -Dkey=value -Dnumbers="one two three"
+#
+
diff --git a/MPE/spark/conf/spark-env.sh b/MPE/spark/conf/spark-env.sh
new file mode 100644
index 0000000..44ca930
--- /dev/null
+++ b/MPE/spark/conf/spark-env.sh
@@ -0,0 +1,83 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This file is sourced when running various Spark programs.
+# Copy it as spark-env.sh and edit that to configure Spark for your site.
+
+# Options read when launching programs locally with
+# ./bin/run-example or ./bin/spark-submit
+# - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files
+# - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node
+# - SPARK_PUBLIC_DNS, to set the public dns name of the driver program
+
+# Options read by executors and drivers running inside the cluster
+# - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node
+# - SPARK_PUBLIC_DNS, to set the public DNS name of the driver program
+# - SPARK_LOCAL_DIRS, storage directories to use on this node for shuffle and RDD data
+# - MESOS_NATIVE_JAVA_LIBRARY, to point to your libmesos.so if you use Mesos
+
+# Options read in YARN client mode
+# - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files
+# - SPARK_EXECUTOR_CORES, Number of cores for the executors (Default: 1).
+# - SPARK_EXECUTOR_MEMORY, Memory per Executor (e.g. 1000M, 2G) (Default: 1G)
+# - SPARK_DRIVER_MEMORY, Memory for Driver (e.g. 1000M, 2G) (Default: 1G)
+
+# Options for the daemons used in the standalone deploy mode
+# - SPARK_MASTER_HOST, to bind the master to a different IP address or hostname
+# - SPARK_MASTER_PORT / SPARK_MASTER_WEBUI_PORT, to use non-default ports for the master
+# - SPARK_MASTER_OPTS, to set config properties only for the master (e.g. "-Dx=y")
+# - SPARK_WORKER_CORES, to set the number of cores to use on this machine
+# - SPARK_WORKER_MEMORY, to set how much total memory workers have to give executors (e.g. 1000m, 2g)
+# - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT, to use non-default ports for the worker
+# - SPARK_WORKER_DIR, to set the working directory of worker processes
+# - SPARK_WORKER_OPTS, to set config properties only for the worker (e.g. "-Dx=y")
+# - SPARK_DAEMON_MEMORY, to allocate to the master, worker and history server themselves (default: 1g).
+# - SPARK_HISTORY_OPTS, to set config properties only for the history server (e.g. "-Dx=y")
+# - SPARK_SHUFFLE_OPTS, to set config properties only for the external shuffle service (e.g. "-Dx=y")
+# - SPARK_DAEMON_JAVA_OPTS, to set config properties for all daemons (e.g. "-Dx=y")
+# - SPARK_DAEMON_CLASSPATH, to set the classpath for all daemons
+# - SPARK_PUBLIC_DNS, to set the public dns name of the master or workers
+
+# Generic options for the daemons used in the standalone deploy mode
+# - SPARK_CONF_DIR Alternate conf dir. (Default: ${SPARK_HOME}/conf)
+# - SPARK_LOG_DIR Where log files are stored. (Default: ${SPARK_HOME}/logs)
+# - SPARK_PID_DIR Where the pid file is stored. (Default: /tmp)
+# - SPARK_IDENT_STRING A string representing this instance of spark. (Default: $USER)
+# - SPARK_NICENESS The scheduling priority for daemons. (Default: 0)
+# - SPARK_NO_DAEMONIZE Run the proposed command in the foreground. It will not output a PID file.
+
+export SPARK_PID_DIR=/data/tsg/olap/spark-2.2.3-bin-hadoop2.7/tmp
+export SPARK_MASTER_IP=192.168.20.223
+export SPARK_MASTER_PORT=7077
+export SPARK_MASTER_WEBUI_PORT=7080
+export SPARK_WORKER_CORES=30
+export SPARK_WORKER_INSTANCES=1
+export SPARK_WORKER_MEMORY=1024m
+export JAVA_HOME=/usr/lib/jvm/jdk1.8.0_73
+export SCALA_HOME=/usr/lib/scala/scala-2.11.7
+export SPARK_WORKER_PORT=7070
+
+
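+# Periodic cleanup of finished applications' work directories; the interval
+# and TTL below are in seconds (sweep once a day, keep app data for 7 days).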
+export SPARK_WORKER_OPTS="
+-Dspark.worker.cleanup.enabled=true
+-Dspark.worker.cleanup.interval=86400
+-Dspark.worker.cleanup.appDataTtl=604800
+"
+
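+# Note: this overrides the absolute SPARK_PID_DIR exported at the top of this
+# file; the last assignment wins.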
+export SPARK_PID_DIR=${SPARK_HOME}/tmp
diff --git a/MPE/spark/conf/spark-env.sh.template b/MPE/spark/conf/spark-env.sh.template
new file mode 100644
index 0000000..0f9150b
--- /dev/null
+++ b/MPE/spark/conf/spark-env.sh.template
@@ -0,0 +1,63 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This file is sourced when running various Spark programs.
+# Copy it as spark-env.sh and edit that to configure Spark for your site.
+
+# Options read when launching programs locally with
+# ./bin/run-example or ./bin/spark-submit
+# - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files
+# - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node
+# - SPARK_PUBLIC_DNS, to set the public dns name of the driver program
+
+# Options read by executors and drivers running inside the cluster
+# - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node
+# - SPARK_PUBLIC_DNS, to set the public DNS name of the driver program
+# - SPARK_LOCAL_DIRS, storage directories to use on this node for shuffle and RDD data
+# - MESOS_NATIVE_JAVA_LIBRARY, to point to your libmesos.so if you use Mesos
+
+# Options read in YARN client mode
+# - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files
+# - SPARK_EXECUTOR_CORES, Number of cores for the executors (Default: 1).
+# - SPARK_EXECUTOR_MEMORY, Memory per Executor (e.g. 1000M, 2G) (Default: 1G)
+# - SPARK_DRIVER_MEMORY, Memory for Driver (e.g. 1000M, 2G) (Default: 1G)
+
+# Options for the daemons used in the standalone deploy mode
+# - SPARK_MASTER_HOST, to bind the master to a different IP address or hostname
+# - SPARK_MASTER_PORT / SPARK_MASTER_WEBUI_PORT, to use non-default ports for the master
+# - SPARK_MASTER_OPTS, to set config properties only for the master (e.g. "-Dx=y")
+# - SPARK_WORKER_CORES, to set the number of cores to use on this machine
+# - SPARK_WORKER_MEMORY, to set how much total memory workers have to give executors (e.g. 1000m, 2g)
+# - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT, to use non-default ports for the worker
+# - SPARK_WORKER_DIR, to set the working directory of worker processes
+# - SPARK_WORKER_OPTS, to set config properties only for the worker (e.g. "-Dx=y")
+# - SPARK_DAEMON_MEMORY, to allocate to the master, worker and history server themselves (default: 1g).
+# - SPARK_HISTORY_OPTS, to set config properties only for the history server (e.g. "-Dx=y")
+# - SPARK_SHUFFLE_OPTS, to set config properties only for the external shuffle service (e.g. "-Dx=y")
+# - SPARK_DAEMON_JAVA_OPTS, to set config properties for all daemons (e.g. "-Dx=y")
+# - SPARK_DAEMON_CLASSPATH, to set the classpath for all daemons
+# - SPARK_PUBLIC_DNS, to set the public dns name of the master or workers
+
+# Generic options for the daemons used in the standalone deploy mode
+# - SPARK_CONF_DIR Alternate conf dir. (Default: ${SPARK_HOME}/conf)
+# - SPARK_LOG_DIR Where log files are stored. (Default: ${SPARK_HOME}/logs)
+# - SPARK_PID_DIR Where the pid file is stored. (Default: /tmp)
+# - SPARK_IDENT_STRING A string representing this instance of spark. (Default: $USER)
+# - SPARK_NICENESS The scheduling priority for daemons. (Default: 0)
+# - SPARK_NO_DAEMONIZE Run the proposed command in the foreground. It will not output a PID file.
diff --git a/MPE/spark/iplearning/application.properties b/MPE/spark/iplearning/application.properties
new file mode 100644
index 0000000..9755a8f
--- /dev/null
+++ b/MPE/spark/iplearning/application.properties
@@ -0,0 +1,44 @@
+# Spark job settings
+spark.sql.shuffle.partitions=30
+spark.executor.memory=1g
+spark.executor.cores=5
+spark.cores.max=30
+spark.app.name=test
+spark.network.timeout=300s
+spark.serializer=org.apache.spark.serializer.KryoSerializer
+master=spark://192.168.20.223:7077
+# Settings for Spark reading from ClickHouse
+spark.read.clickhouse.url=jdbc:clickhouse://192.168.20.252:8124/tsg_galaxy_v3
+spark.read.clickhouse.driver=ru.yandex.clickhouse.ClickHouseDriver
+spark.read.clickhouse.user=default
+spark.read.clickhouse.password=galaxy2019
+spark.read.clickhouse.numPartitions=5
+spark.read.clickhouse.fetchsize=10000
+spark.read.clickhouse.partitionColumn=LAST_FOUND_TIME
+spark.read.clickhouse.session.table=session_record
+spark.read.clickhouse.radius.table=radius_record
+clickhouse.socket.timeout=3600000
+# ArangoDB settings
+arangoDB.host=192.168.20.222
+arangoDB.port=8529
+arangoDB.user=upsert
+arangoDB.password=galaxy2019
+arangoDB.DB.name=tsg_galaxy_v3
+arangoDB.ttl=3600
+
+thread.pool.number=10
+
+# How the ClickHouse read window is chosen: 0 = read the past hour; 1 = use the explicit time range below
+clickhouse.time.limit.type=0
+read.clickhouse.max.time=1608518990
+read.clickhouse.min.time=1604851201
+
+update.arango.batch=10000
+
+distinct.client.ip.num=10000
+recent.count.hour=24
+
+update.interval=3600
+arangodb.total.num=100000000
+# Radius read window in minutes; keep it consistent with the radius job's run period
+read.radius.granularity=-60
diff --git a/MPE/spark/iplearning/ip-learning-spark.jar b/MPE/spark/iplearning/ip-learning-spark.jar
new file mode 100644
index 0000000..6a5af10
--- /dev/null
+++ b/MPE/spark/iplearning/ip-learning-spark.jar
Binary files differ
diff --git a/MPE/spark/iplearning/iplearning.sh b/MPE/spark/iplearning/iplearning.sh
new file mode 100644
index 0000000..48f64b1
--- /dev/null
+++ b/MPE/spark/iplearning/iplearning.sh
@@ -0,0 +1,53 @@
+#!/bin/bash
+source /etc/profile
+
+jar_name="ip-learning-spark.jar"
+
+spark_home=/data/tsg/olap/spark-2.2.3-bin-hadoop2.7
+
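+# Count running instances of the job jar (grep -v grep excludes this pipeline itself).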
+isrun=`ps -ef | grep -w "ip-learning-spark.jar" | grep -v grep | wc -l`
+
+if [ ! -f "$spark_home/iplearning/runnum" ];then
+ echo 0 > $spark_home/iplearning/runnum
+fi
+
+if [ ! -d "$spark_home/iplearning/logs" ];then
+ mkdir -p $spark_home/iplearning/logs
+ mkdir -p $spark_home/iplearning/logs/IpLearningApplication
+ mkdir -p $spark_home/iplearning/logs/IpRecommendApplication
+ mkdir -p $spark_home/iplearning/logs/SubscriberRecommendApplication
+fi
+
+#start spark job
+function startJob(){
+
+echo "IpLearningApplication 程序运行 - `date "+%Y-%m-%d %H:%M:%S"`" >> $spark_home/iplearning/logs/IpLearningApplication/running-`date +'%Y-%m'`.log
+
+nohup ${spark_home}/bin/spark-submit --class cn.ac.iie.main.IpLearningApplication --driver-java-options "-Dlog4j.configuration=file:$spark_home/conf/log4j-task.properties" --conf spark.executor.extraJavaOptions="-Dlog4j.configuration=file:$spark_home/conf/log4j-task.properties" ${spark_home}/iplearning/$jar_name >> $spark_home/iplearning/logs/IpLearningApplication/running-`date +'%Y-%m'`.log
+
+echo "IpRecommendApplication 程序运行 - `date "+%Y-%m-%d %H:%M:%S"`" >> $spark_home/iplearning/logs/IpRecommendApplication/running-`date +'%Y-%m'`.log
+
+nohup ${spark_home}/bin/spark-submit --class cn.ac.iie.main.IpRecommendApplication --driver-java-options "-Dlog4j.configuration=file:$spark_home/conf/log4j-task.properties" --conf spark.executor.extraJavaOptions="-Dlog4j.configuration=file:$spark_home/conf/log4j-task.properties" ${spark_home}/iplearning/$jar_name >> $spark_home/iplearning/logs/IpRecommendApplication/running-`date +'%Y-%m'`.log
+
+echo "SubscriberRecommendApplication 程序运行 - `date "+%Y-%m-%d %H:%M:%S"`" >> $spark_home/iplearning/logs/SubscriberRecommendApplication/running-`date +'%Y-%m'`.log
+
+nohup ${spark_home}/bin/spark-submit --class cn.ac.iie.main.SubscriberRecommendApplication --driver-java-options "-Dlog4j.configuration=file:$spark_home/conf/log4j-task.properties" --conf spark.executor.extraJavaOptions="-Dlog4j.configuration=file:$spark_home/conf/log4j-task.properties" ${spark_home}/iplearning/$jar_name >> $spark_home/iplearning/logs/SubscriberRecommendApplication/running-`date +'%Y-%m'`.log
+
+}
+
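+# Watchdog logic: this script is meant to be run on a schedule (e.g. from
+# cron). While the jobs are still alive, bump the counter in runnum; once it
+# reaches 3 (the jobs look hung), kill them and resubmit.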
+if [[ $isrun -ge 1 ]];then
+ OLD_NUM=`cat $spark_home/iplearning/runnum`
+ RESTART_NUM=`expr $OLD_NUM + 1`
+ echo $RESTART_NUM > $spark_home/iplearning/runnum
+
+ if [ $RESTART_NUM -eq "3" ];then
+ pgrep -f "ip-learning-spark.jar" |xargs kill -9
+ startJob
+ echo 1 > $spark_home/iplearning/runnum
+ fi
+else
+ startJob
+ echo 1 > $spark_home/iplearning/runnum
+fi
+
+
diff --git a/MPE/spark/iplearning/iplearning_monitor.sh b/MPE/spark/iplearning/iplearning_monitor.sh
new file mode 100644
index 0000000..59e4c3d
--- /dev/null
+++ b/MPE/spark/iplearning/iplearning_monitor.sh
@@ -0,0 +1,56 @@
+#! /bin/bash
+
+source /etc/profile
+
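+# Scrape counts and timings out of the iplearning application log and publish
+# them as Prometheus metrics through node-exporter's textfile collector.
+# Every value below is extracted the same way: keep log lines matching a
+# message, take the second ':'-separated field, then its first token, and
+# keep only the last match. (The sed patterns match the application's log
+# messages verbatim and therefore stay in the log's original language.)
+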
+# iplearning log file path
+#iplearning_log_file='/data/tsg/olap/spark-2.2.3-bin-hadoop2.7/logs/ip-learning-application.log'
+iplearning_log_file="/data/tsg/olap/spark-2.2.3-bin-hadoop2.7/logs/ip-learning-application-$(date +'%Y-%m-%d').log"
+iplearning_monitor_prom_file="/data/tsg/olap/galaxy/volumes/node-exporter/prom/iplearning_monitor.prom"
+
+# R_LOCATE_FQDN2IP count in arangoDB
+fqdn_locate_ip_arango_count=`sed -n '/RETURN LENGTH(R_LOCATE_FQDN2IP)/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo "fqdn_locate_ip_arango_count $fqdn_locate_ip_arango_count" > $iplearning_monitor_prom_file
+
+# time spent reading R_LOCATE_FQDN2IP from arangoDB
+read_fqdn_locate_ip_arango_time=`sed -n '/读取R_LOCATE_FQDN2IP arangoDB 共耗时/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo "read_fqdn_locate_ip_arango_time $read_fqdn_locate_ip_arango_time" >> $iplearning_monitor_prom_file
+
+# fqdn-locate-ip edge count in clickhouse
+fqdn_locate_ip_ck_count=`sed -n '/读取R_LOCATE_FQDN2IP clickhouse成功/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo fqdn_locate_ip_ck_count $fqdn_locate_ip_ck_count >> $iplearning_monitor_prom_file
+
+# time spent updating R_LOCATE_FQDN2IP
+update_fqdn_locate_ip_time=`sed -n '/更新R_LOCATE_FQDN2IP 时间/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo update_fqdn_locate_ip_time $update_fqdn_locate_ip_time >> $iplearning_monitor_prom_file
+
+# SUBSCRIBER count in arangoDB
+subid_arango_count=`sed -n '/RETURN LENGTH(SUBSCRIBER)/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo subid_arango_count $subid_arango_count >> $iplearning_monitor_prom_file
+
+# time spent reading SUBSCRIBER from arangoDB
+read_subid_arango_time=`sed -n '/读取SUBSCRIBER arangoDB 共耗时/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo read_subid_arango_time $read_subid_arango_time >> $iplearning_monitor_prom_file
+
+# subscriberid vertex count in clickhouse
+subid_ck_count=`sed -n '/读取SUBSCRIBER clickhouse成功/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo subid_ck_count $subid_ck_count >> $iplearning_monitor_prom_file
+
+# time spent updating SUBSCRIBER
+update_subid_time=`sed -n '/更新SUBSCRIBER 时间/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo update_subid_time $update_subid_time >> $iplearning_monitor_prom_file
+
+# R_LOCATE_SUBSCRIBER2IP count in arangoDB
+subid_locate_ip_arango_count=`sed -n '/RETURN LENGTH(R_LOCATE_SUBSCRIBER2IP)/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo subid_locate_ip_arango_count $subid_locate_ip_arango_count >> $iplearning_monitor_prom_file
+
+# time spent reading R_LOCATE_SUBSCRIBER2IP from arangoDB
+read_subid_locate_ip_arango_time=`sed -n '/读取R_LOCATE_SUBSCRIBER2IP arangoDB 共耗时/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo read_subid_locate_ip_arango_time $read_subid_locate_ip_arango_time >> $iplearning_monitor_prom_file
+
+# subscriberid-locate-ip edge count in clickhouse
+subid_locate_ip_ck_count=`sed -n '/读取R_LOCATE_SUBSCRIBER2IP clickhouse成功/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo subid_locate_ip_ck_count $subid_locate_ip_ck_count >> $iplearning_monitor_prom_file
+
+# time spent updating R_LOCATE_SUBSCRIBER2IP
+update_subid_locate_ip_time=`sed -n '/更新R_LOCATE_SUBSCRIBER2IP 时间/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo update_subid_locate_ip_time $update_subid_locate_ip_time >> $iplearning_monitor_prom_file
diff --git a/MPE/spark/iplearning/logs/IpLearningApplication/running-2023-07.log b/MPE/spark/iplearning/logs/IpLearningApplication/running-2023-07.log
new file mode 100644
index 0000000..49f701e
--- /dev/null
+++ b/MPE/spark/iplearning/logs/IpLearningApplication/running-2023-07.log
@@ -0,0 +1,9683 @@
+IpLearningApplication started - 2023-07-12 04:40:31
+[2023-07-12 04:41:33+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 04:41:33+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication started - 2023-07-12 05:00:00
+[2023-07-12 05:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication started - 2023-07-12 06:00:00
+[2023-07-12 06:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication started - 2023-07-12 07:00:00
+[2023-07-12 07:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication started - 2023-07-12 08:00:00
+[2023-07-12 08:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication started - 2023-07-12 09:00:00
+[2023-07-12 09:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication started - 2023-07-12 10:00:00
+[2023-07-12 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication started - 2023-07-12 11:00:00
+[2023-07-12 11:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication started - 2023-07-12 12:00:00
+[2023-07-12 12:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication started - 2023-07-12 13:00:00
+[2023-07-12 13:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication started - 2023-07-12 14:00:00
+[2023-07-12 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
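+Every one of these hourly traces dies in the same place: registration with the master times out, the client kills the application, and SparkContext construction then fails while the JVM is still inside SparkSessionUtil's static initializer (the <init>/<clinit> frames show the object body calling getSparkSession, which calls SparkSession.builder().getOrCreate()). That utility is not part of this tree, so the sketch below is only a guess at its shape, reconstructed from the stack frames; the app name is a placeholder and the master URL is assumed to come from spark-defaults.conf at submit time.
+
+    // Minimal sketch (assumed, not the real cn.ac.iie.utils.SparkSessionUtil)
+    import org.apache.spark.sql.SparkSession
+
+    object SparkSessionUtil {
+      // Assigning a val in the object body runs inside <clinit>, so a master
+      // outage surfaces as an exception during class initialization -- exactly
+      // the <init>/<clinit> frames in the traces above.
+      private val spark: SparkSession = getSparkSession
+
+      def getSparkSession: SparkSession =
+        SparkSession.builder()
+          .appName("IpLearningApplication") // placeholder name
+          .getOrCreate()                    // master URL from submit-time conf
+    }
+
+With that layout, the one-minute gap between each run header and its ERROR lines is just the registration-retry budget being exhausted before the exception escapes class initialization.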
+IpLearningApplication program run - 2023-07-12 15:00:00
+[2023-07-12 15:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 15:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	(stack frames identical to the 2023-07-12 14:00:00 run above)
+IpLearningApplication program run - 2023-07-12 16:00:00
+[2023-07-12 16:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
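+The alternation between the two exception types is two symptoms of one failure. After the retry thread gives up and kills the application, the half-built SparkContext is torn down while its constructor is still running; the constructor then trips either the MetricsSystem guard at SparkContext.scala:524 or, presumably once teardown has already nulled the field it touches next, the bare NullPointerException at SparkContext.scala:567 seen here. The guard text is exactly what Scala's require emits, as this self-contained snippet shows:
+
+    // require(cond, msg) throws IllegalArgumentException with the message
+    // prefixed by "requirement failed: " -- the first line of the 14:00 trace.
+    def servletHandlersGuard(running: Boolean): Unit =
+      require(running, "Can only call getServletHandlers on a running MetricsSystem")
+
+    // servletHandlersGuard(false) throws:
+    //   java.lang.IllegalArgumentException: requirement failed:
+    //     Can only call getServletHandlers on a running MetricsSystem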
+IpLearningApplication program run - 2023-07-12 17:00:00
+[2023-07-12 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 17:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	(stack frames identical to the 2023-07-12 14:00:00 run above)
+IpLearningApplication program run - 2023-07-12 18:00:00
+[2023-07-12 18:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	(stack frames identical to the 2023-07-12 14:00:00 run above)
+IpLearningApplication program run - 2023-07-12 19:00:00
+[2023-07-12 19:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	(stack frames identical to the 2023-07-12 14:00:00 run above)
+IpLearningApplication program run - 2023-07-12 20:00:00
+[2023-07-12 20:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	(stack frames identical to the 2023-07-12 16:00:00 run above)
+IpLearningApplication program run - 2023-07-12 21:00:00
+[2023-07-12 21:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	(stack frames identical to the 2023-07-12 14:00:00 run above)
+IpLearningApplication program run - 2023-07-12 22:00:00
+[2023-07-12 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	(stack frames identical to the 2023-07-12 14:00:00 run above)
+IpLearningApplication program run - 2023-07-12 23:00:00
+[2023-07-12 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	(stack frames identical to the 2023-07-12 16:00:00 run above)
+IpLearningApplication program run - 2023-07-13 00:00:00
+[2023-07-13 00:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	(stack frames identical to the 2023-07-12 14:00:00 run above)
+IpLearningApplication program run - 2023-07-13 01:00:00
+[2023-07-13 01:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	(stack frames identical to the 2023-07-12 14:00:00 run above)
+IpLearningApplication program run - 2023-07-13 02:00:00
+[2023-07-13 02:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	(stack frames identical to the 2023-07-12 14:00:00 run above)
+IpLearningApplication program run - 2023-07-13 03:00:00
+[2023-07-13 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 03:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	(stack frames identical to the 2023-07-12 14:00:00 run above)
+IpLearningApplication program run - 2023-07-13 04:00:00
+[2023-07-13 04:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 04:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	(stack frames identical to the 2023-07-12 14:00:00 run above)
+IpLearningApplication program run - 2023-07-13 05:00:00
+[2023-07-13 05:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	(stack frames identical to the 2023-07-12 14:00:00 run above)
+IpLearningApplication program run - 2023-07-13 06:00:00
+[2023-07-13 06:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	(stack frames identical to the 2023-07-12 14:00:00 run above)
+IpLearningApplication program run - 2023-07-13 07:00:00
+[2023-07-13 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	(stack frames identical to the 2023-07-12 14:00:00 run above)
+IpLearningApplication program run - 2023-07-13 08:00:00
+[2023-07-13 08:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	(stack frames identical to the 2023-07-12 14:00:00 run above)
+IpLearningApplication program run - 2023-07-13 09:00:00
+[2023-07-13 09:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	(stack frames identical to the 2023-07-12 14:00:00 run above)
+IpLearningApplication program run - 2023-07-13 10:00:00
+[2023-07-13 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	(stack frames identical to the 2023-07-12 14:00:00 run above)
+IpLearningApplication program run - 2023-07-13 11:00:00
+[2023-07-13 11:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-13 12:00:00
+[2023-07-13 12:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-13 13:00:00
+[2023-07-13 13:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-13 14:00:00
+[2023-07-13 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-13 15:00:00
+[2023-07-13 15:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 15:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-13 16:00:00
+[2023-07-13 16:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-13 17:00:00
+[2023-07-13 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 17:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-13 18:00:00
+[2023-07-13 18:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-13 19:00:00
+[2023-07-13 19:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-13 20:00:00
+[2023-07-13 20:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-13 21:00:00
+[2023-07-13 21:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-13 22:00:00
+[2023-07-13 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-13 23:00:00
+[2023-07-13 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 00:00:00
+[2023-07-14 00:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 01:00:00
+[2023-07-14 01:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 02:00:00
+[2023-07-14 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 03:00:00
+[2023-07-14 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 03:01:03+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 04:00:00
+[2023-07-14 04:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 04:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 05:00:00
+[2023-07-14 05:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 06:00:00
+[2023-07-14 06:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 07:00:00
+[2023-07-14 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 08:00:00
+[2023-07-14 08:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 09:00:00
+[2023-07-14 09:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 10:00:00
+[2023-07-14 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 11:00:00
+[2023-07-14 11:01:03+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 11:01:03+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 12:00:00
+[2023-07-14 12:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 13:00:00
+[2023-07-14 13:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 14:00:00
+[2023-07-14 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 15:00:00
+[2023-07-14 15:01:03+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 15:01:03+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 16:00:00
+[2023-07-14 16:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
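+Note: the 2023-07-14 16:00 run above fails with a NullPointerException at SparkContext.scala:567 rather than the usual MetricsSystem requirement failure; plausibly the same root cause (registration with the master never succeeds), just surfacing at a different point in the aborted constructor. A hedged retry sketch (the bounded-retry policy is an assumption, not something SparkSessionUtil is known to do) so a short master outage does not turn into one stack trace per scheduled run:
+
+import org.apache.spark.sql.SparkSession
+import scala.util.{Failure, Success, Try}
+
+object RetrySessionUtil {
+  // Retry session creation a few times with a fixed wait between attempts;
+  // only the final failure is rethrown, so transient outages stay quiet.
+  def sparkSessionWithRetry(maxAttempts: Int = 3, waitMs: Long = 30000L): SparkSession = {
+    def attempt(n: Int): SparkSession =
+      Try(SparkSession.builder().appName("IpLearningApplication").getOrCreate()) match {
+        case Success(session) => session
+        case Failure(_) if n < maxAttempts =>
+          Thread.sleep(waitMs)
+          attempt(n + 1)
+        case Failure(e) => throw e
+      }
+    attempt(1)
+  }
+}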
+IpLearningApplication program run - 2023-07-14 17:00:00
+[2023-07-14 17:01:03+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 17:01:03+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 18:00:00
+[2023-07-14 18:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 19:00:00
+[2023-07-14 19:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
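+Note: only two failure signatures appear in the runs above, the MetricsSystem requirement failure and the occasional NullPointerException (the 16:00 and 19:00 runs). A small tally sketch for the monitor script, assuming the relative log path used in this tree; the match strings are copied verbatim from the entries in this file:
+
+import scala.io.Source
+
+object FailureTally {
+  def main(args: Array[String]): Unit = {
+    // Assumed relative path to this log within the iplearning directory.
+    val src = Source.fromFile("logs/IpLearningApplication/running-2023-07.log")
+    val lines = try src.getLines().toList finally src.close()
+    val metrics = lines.count(_.contains("Can only call getServletHandlers on a running MetricsSystem"))
+    val npe = lines.count(_.contains("java.lang.NullPointerException"))
+    println(s"MetricsSystem init failures: $metrics, NullPointerExceptions: $npe")
+  }
+}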
+IpLearningApplication program run - 2023-07-14 20:00:00
+[2023-07-14 20:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 21:00:00
+[2023-07-14 21:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 22:00:00
+[2023-07-14 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-14 23:00:00
+[2023-07-14 23:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-15 00:00:00
+[2023-07-15 00:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-15 01:00:00
+[2023-07-15 01:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-15 02:00:00
+[2023-07-15 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-15 03:00:00
+[2023-07-15 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 03:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-15 04:00:00
+[2023-07-15 04:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 04:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-15 05:00:00
+[2023-07-15 05:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-15 06:00:00
+[2023-07-15 06:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-15 07:00:00
+[2023-07-15 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-15 08:00:00
+[2023-07-15 08:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-15 09:00:00
+[2023-07-15 09:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-15 10:00:00
+[2023-07-15 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-15 11:00:00
+[2023-07-15 11:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-15 12:00:00
+[2023-07-15 12:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-15 13:00:00
+[2023-07-15 13:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-15 14:00:00
+[2023-07-15 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
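A few runs (14:00, 18:00, and 23:00) surface the same master outage as a NullPointerException from SparkContext.scala:567 instead of the MetricsSystem requirement failure; the constructor appears to get slightly further before the shutdown triggered by the unresponsive masters races it. Since the job is relaunched every hour anyway (per the run markers), a defensive wrapper around session creation is one way to ride out a master that is merely slow to come up. A minimal sketch, assuming nothing about the real helper beyond the builder call visible in the trace:

    // Illustrative retry wrapper -- not part of the deployed code.
    import scala.util.{Failure, Success, Try}
    import org.apache.spark.sql.SparkSession

    def getSessionWithRetry(builder: SparkSession.Builder,
                            attempts: Int = 3,
                            backoffMs: Long = 30000L): SparkSession =
      Try(builder.getOrCreate()) match {
        case Success(session) => session
        case Failure(_) if attempts > 1 =>
          // The master may still be starting; back off and try again
          // instead of dying on the first registration failure.
          Thread.sleep(backoffMs)
          getSessionWithRetry(builder, attempts - 1, backoffMs)
        case Failure(e) => throw e
      }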
+IpLearningApplication program running - 2023-07-15 15:00:00
+[2023-07-15 15:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 15:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-15 16:00:00
+[2023-07-15 16:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-15 17:00:00
+[2023-07-15 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 17:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-15 18:00:00
+[2023-07-15 18:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+IpLearningApplication program running - 2023-07-15 19:00:00
+[2023-07-15 19:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-15 20:00:00
+[2023-07-15 20:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-15 21:00:00
+[2023-07-15 21:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-15 22:00:00
+[2023-07-15 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-15 23:00:00
+[2023-07-15 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+IpLearningApplication program running - 2023-07-16 00:00:00
+[2023-07-16 00:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 01:00:00
+[2023-07-16 01:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 02:00:00
+[2023-07-16 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 03:00:00
+[2023-07-16 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 03:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 04:00:00
+[2023-07-16 04:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 04:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 05:00:00
+[2023-07-16 05:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 06:00:00
+[2023-07-16 06:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 07:00:00
+[2023-07-16 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 08:00:00
+[2023-07-16 08:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 09:00:00
+[2023-07-16 09:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 10:00:00
+[2023-07-16 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 11:00:00
+[2023-07-16 11:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 12:00:00
+[2023-07-16 12:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 13:00:00
+[2023-07-16 13:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 14:00:00
+[2023-07-16 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 15:00:00
+[2023-07-16 15:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 15:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 16:00:00
+[2023-07-16 16:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 17:00:00
+[2023-07-16 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 17:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 18:00:00
+[2023-07-16 18:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 19:00:00
+[2023-07-16 19:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 20:00:00
+[2023-07-16 20:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 21:00:00
+[2023-07-16 21:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 22:00:00
+[2023-07-16 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 23:00:00
+[2023-07-16 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-17 00:00:00
+[2023-07-17 00:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-17 01:00:00
+[2023-07-17 01:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-17 02:00:00
+[2023-07-17 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-17 03:00:00
+[2023-07-17 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 03:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-17 04:00:00
+[2023-07-17 04:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 04:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-17 05:00:00
+[2023-07-17 05:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-17 06:00:00
+[2023-07-17 06:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-17 07:00:00
+[2023-07-17 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-17 08:00:00
+[2023-07-17 08:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-17 09:00:00
+[2023-07-17 09:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-17 10:00:00
+[2023-07-17 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
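On some runs the same unresponsive-master condition surfaces as a NullPointerException instead, either in the SparkContext constructor or, as in the trace above, while BlockManager.initialize registers with BlockManagerMaster: by that point the driver components it depends on have been torn down. One way to fail fast with a clearer message is a pre-flight reachability check before building the session. A sketch, with host and port assumed for this deployment:

// Minimal pre-flight check, assuming a standalone master at master-host:7077.
// Fails immediately with an explicit message instead of letting the
// registration retry thread kill the app a minute into startup.
object MasterPreflight {
  import java.net.{InetSocketAddress, Socket}
  import scala.util.Try

  // True when a TCP connection to the master's RPC port succeeds within
  // the timeout; false on refusal, timeout, or any other I/O error.
  def masterReachable(host: String, port: Int, timeoutMs: Int = 3000): Boolean =
    Try {
      val socket = new Socket()
      try { socket.connect(new InetSocketAddress(host, port), timeoutMs); true }
      finally socket.close()
    }.getOrElse(false)

  def main(args: Array[String]): Unit =
    if (!masterReachable("master-host", 7077)) // placeholder host and port
      sys.error("Spark master unreachable; check that the master process is up")
}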
+IpLearningApplication program running - 2023-07-17 11:00:00
+[2023-07-17 11:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-17 12:00:00
+[2023-07-17 12:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-17 13:00:00
+[2023-07-17 13:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-17 14:00:00
+[2023-07-17 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-17 15:00:00
+[2023-07-17 15:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 15:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-17 16:00:00
+[2023-07-17 16:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-17 17:00:00
+[2023-07-17 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 17:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-17 18:00:00
+[2023-07-17 18:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-17 19:00:00
+[2023-07-17 19:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-17 20:00:00
+[2023-07-17 20:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-17 21:00:00
+[2023-07-17 21:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-17 22:00:00
+[2023-07-17 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-17 23:00:00
+[2023-07-17 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-18 00:00:00
+[2023-07-18 00:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-18 01:00:00
+[2023-07-18 01:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-18 02:00:00
+[2023-07-18 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-18 03:00:00
+[2023-07-18 03:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 03:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-18 04:00:00
+[2023-07-18 04:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 04:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-18 05:00:00
+[2023-07-18 05:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-18 06:00:00
+[2023-07-18 06:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-18 07:00:00
+[2023-07-18 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-18 08:00:00
+[2023-07-18 08:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-18 09:00:00
+[2023-07-18 09:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-18 10:00:00
+[2023-07-18 10:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-18 11:00:00
+[2023-07-18 11:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-18 12:00:00
+[2023-07-18 12:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-18 13:00:00
+[2023-07-18 13:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-18 14:00:00
+[2023-07-18 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-18 15:00:00
+[2023-07-18 15:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 15:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-18 16:00:00
+[2023-07-18 16:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-18 17:00:00
+[2023-07-18 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 17:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-18 18:00:00
+[2023-07-18 18:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-18 19:00:00
+[2023-07-18 19:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-18 20:00:00
+[2023-07-18 20:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-18 21:00:00
+[2023-07-18 21:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-18 22:00:00
+[2023-07-18 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-18 23:00:00
+[2023-07-18 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-19 00:00:00
+[2023-07-19 00:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-19 01:00:00
+[2023-07-19 01:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-19 02:00:00
+[2023-07-19 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-19 03:00:00
+[2023-07-19 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 03:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-19 04:00:00
+[2023-07-19 04:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 04:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-19 05:00:00
+[2023-07-19 05:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-19 06:00:00
+[2023-07-19 06:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-19 07:00:00
+[2023-07-19 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-19 08:00:00
+[2023-07-19 08:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-19 09:00:00
+[2023-07-19 09:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-19 10:00:00
+[2023-07-19 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 10:01:03+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-19 11:00:00
+[2023-07-19 11:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-19 12:00:00
+[2023-07-19 12:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-19 13:00:00
+[2023-07-19 13:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-19 14:00:00
+[2023-07-19 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-19 15:00:00
+[2023-07-19 15:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 15:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-19 16:00:00
+[2023-07-19 16:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-19 17:00:00
+[2023-07-19 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 17:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-19 18:00:00
+[2023-07-19 18:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-19 19:00:00
+[2023-07-19 19:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-19 20:00:00
+[2023-07-19 20:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-19 21:00:00
+[2023-07-19 21:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-19 22:00:00
+[2023-07-19 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-19 23:00:00
+[2023-07-19 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 00:00:00
+[2023-07-20 00:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 01:00:00
+[2023-07-20 01:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 02:00:00
+[2023-07-20 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 03:00:00
+[2023-07-20 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 03:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 04:00:00
+[2023-07-20 04:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 04:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 05:00:00
+[2023-07-20 05:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 06:00:00
+[2023-07-20 06:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 07:00:00
+[2023-07-20 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 08:00:00
+[2023-07-20 08:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 09:00:00
+[2023-07-20 09:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 10:00:00
+[2023-07-20 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 11:00:00
+[2023-07-20 11:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 12:00:00
+[2023-07-20 12:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 13:00:00
+[2023-07-20 13:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 14:00:00
+[2023-07-20 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 15:00:00
+[2023-07-20 15:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 15:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 16:00:00
+[2023-07-20 16:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 17:00:00
+[2023-07-20 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 17:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 18:00:00
+[2023-07-20 18:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 19:00:00
+[2023-07-20 19:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 20:00:00
+[2023-07-20 20:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 21:00:00
+[2023-07-20 21:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 22:00:00
+[2023-07-20 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-20 23:00:00
+[2023-07-20 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-21 00:00:00
+[2023-07-21 00:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-21 01:00:00
+[2023-07-21 01:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 02:00:00
+[2023-07-21 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 03:00:00
+[2023-07-21 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 03:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 04:00:00
+[2023-07-21 04:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 04:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 05:00:00
+[2023-07-21 05:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 06:00:00
+[2023-07-21 06:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 07:00:00
+[2023-07-21 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 08:00:00
+[2023-07-21 08:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 09:00:00
+[2023-07-21 09:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 10:00:00
+[2023-07-21 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 11:00:00
+[2023-07-21 11:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 12:00:00
+[2023-07-21 12:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 13:00:00
+[2023-07-21 13:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 14:00:00
+[2023-07-21 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 15:00:00
+[2023-07-21 15:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 15:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 16:00:00
+[2023-07-21 16:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 17:00:00
+[2023-07-21 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 17:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 18:00:00
+[2023-07-21 18:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 19:00:00
+[2023-07-21 19:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 20:00:00
+[2023-07-21 20:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-21 21:00:00
+[2023-07-21 21:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-21 22:00:00
+[2023-07-21 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-21 23:00:00
+[2023-07-21 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 00:00:00
+[2023-07-22 00:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 01:00:00
+[2023-07-22 01:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 02:00:00
+[2023-07-22 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 03:00:00
+[2023-07-22 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 03:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 04:00:00
+[2023-07-22 04:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 04:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 05:00:00
+[2023-07-22 05:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 06:00:00
+[2023-07-22 06:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 07:00:00
+[2023-07-22 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 08:00:00
+[2023-07-22 08:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 09:00:00
+[2023-07-22 09:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 10:00:00
+[2023-07-22 10:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 11:00:00
+[2023-07-22 11:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 12:00:00
+[2023-07-22 12:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 13:00:00
+[2023-07-22 13:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 14:00:00
+[2023-07-22 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 15:00:00
+[2023-07-22 15:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 15:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 16:00:00
+[2023-07-22 16:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 17:00:00
+[2023-07-22 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 17:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 18:00:00
+[2023-07-22 18:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 19:00:00
+[2023-07-22 19:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 20:00:00
+[2023-07-22 20:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 21:00:00
+[2023-07-22 21:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 22:00:00
+[2023-07-22 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-22 23:00:00
+[2023-07-22 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 00:00:00
+[2023-07-23 00:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 01:00:00
+[2023-07-23 01:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 02:00:00
+[2023-07-23 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 03:00:00
+[2023-07-23 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 03:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 04:00:00
+[2023-07-23 04:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 04:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 05:00:00
+[2023-07-23 05:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 06:00:00
+[2023-07-23 06:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 07:00:00
+[2023-07-23 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 08:00:00
+[2023-07-23 08:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 09:00:00
+[2023-07-23 09:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 10:00:00
+[2023-07-23 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 11:00:00
+[2023-07-23 11:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 12:00:00
+[2023-07-23 12:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 13:00:00
+[2023-07-23 13:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 14:00:00
+[2023-07-23 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 15:00:00
+[2023-07-23 15:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 15:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 16:00:00
+[2023-07-23 16:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 17:00:00
+[2023-07-23 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 17:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 18:00:00
+[2023-07-23 18:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 19:00:00
+[2023-07-23 19:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 20:00:00
+[2023-07-23 20:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+org.apache.spark.SparkException: Exception thrown in awaitResult:
+ at org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:205)
+ at org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)
+ at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:92)
+ at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:76)
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+Caused by: org.apache.spark.SparkException: Could not find BlockManagerMaster.
+ at org.apache.spark.rpc.netty.Dispatcher.postMessage(Dispatcher.scala:157)
+ at org.apache.spark.rpc.netty.Dispatcher.postLocalMessage(Dispatcher.scala:132)
+ at org.apache.spark.rpc.netty.NettyRpcEnv.ask(NettyRpcEnv.scala:228)
+ at org.apache.spark.rpc.netty.NettyRpcEndpointRef.ask(NettyRpcEnv.scala:522)
+ at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:91)
+ ... 30 more
+IpLearningApplication program run - 2023-07-23 21:00:00
+[2023-07-23 21:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 22:00:00
+[2023-07-23 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-23 23:00:00
+[2023-07-23 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-24 00:00:00
+[2023-07-24 00:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-24 01:00:00
+[2023-07-24 01:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-24 02:00:00
+[2023-07-24 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-24 03:00:00
+[2023-07-24 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 03:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-24 04:00:00
+[2023-07-24 04:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 04:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-24 05:00:00
+[2023-07-24 05:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-24 06:00:00
+[2023-07-24 06:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-24 07:00:00
+[2023-07-24 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-24 08:00:00
+[2023-07-24 08:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-24 09:00:00
+[2023-07-24 09:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program run - 2023-07-24 10:00:00
+[2023-07-24 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-24 11:00:00
+[2023-07-24 11:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
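Every hourly run recorded above fails in the same sequence: the standalone app client exhausts its registration retries ("All masters are unresponsive! Giving up."), the half-built SparkContext is torn down, and the driver then trips the MetricsSystem precondition while attaching the metrics servlet. Each trace enters through cn.ac.iie.utils.SparkSessionUtil.getSparkSession (SparkSessionUtil.scala:30), whose source is not part of this diff; a minimal sketch of what that call presumably does, with a placeholder master URL, is:

    import org.apache.spark.sql.SparkSession

    object SparkSessionUtil {
      // "spark://mpe-master:7077" is a placeholder, not the project's real URL;
      // getOrCreate is the point where the traces above abort once no master
      // acknowledges the application's registration.
      def getSparkSession(appName: String,
                          master: String = "spark://mpe-master:7077"): SparkSession =
        SparkSession.builder()
          .appName(appName)
          .master(master)
          .getOrCreate()
    }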
diff --git a/MPE/spark/iplearning/logs/IpRecommendApplication/running-2023-07.log b/MPE/spark/iplearning/logs/IpRecommendApplication/running-2023-07.log
new file mode 100644
index 0000000..905893a
--- /dev/null
+++ b/MPE/spark/iplearning/logs/IpRecommendApplication/running-2023-07.log
@@ -0,0 +1,9640 @@
+IpRecommendApplication program running - 2023-07-12 04:41:34
+[2023-07-12 04:42:36+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 04:42:36+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-12 05:01:02
+[2023-07-12 05:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 05:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-12 06:01:02
+[2023-07-12 06:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 06:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-12 07:01:02
+[2023-07-12 07:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 07:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
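The 07:01 run above shows a second failure signature: instead of the MetricsSystem check, the SparkContext constructor itself throws a NullPointerException (SparkContext.scala:567). The root cause is unchanged; once the master stops responding, construction aborts midway and whichever component is touched next fails. Since the job is resubmitted hourly against a dead master, wrapping session creation so the failure is reported once and cleanly is one option; a sketch (not project code) using scala.util.Try:

    import scala.util.{Failure, Success, Try}
    import org.apache.spark.sql.SparkSession

    // Sketch only: turn a partially-initialized-context crash into a single
    // clear log line before exiting, so the hourly log stays readable.
    def sessionOrExit(appName: String, master: String): SparkSession =
      Try(SparkSession.builder().appName(appName).master(master).getOrCreate()) match {
        case Success(spark) => spark
        case Failure(e) =>
          System.err.println(s"Spark master at $master unreachable: ${e.getMessage}")
          sys.exit(1)
      }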
+IpRecommendApplication program running - 2023-07-12 08:01:02
+[2023-07-12 08:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 08:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
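The 08:01 run adds the third and last signature in this log: a NullPointerException while the driver's BlockManager registers with the BlockManagerMaster (BlockManagerMaster.scala:64). All three signatures sit downstream of the same "All masters are unresponsive" event, so they point at cluster availability rather than application logic. A cheap pre-flight probe of the standalone master's web UI (port 8080 by default; the host below is a placeholder) would let the hourly job skip submission and log a single "master down" line instead; a hedged sketch:

    import java.net.{HttpURLConnection, URL}
    import scala.util.Try

    // Sketch only: returns true when the standalone master's web UI answers,
    // a reasonable liveness proxy to check before calling spark-submit.
    def masterIsUp(host: String, port: Int = 8080): Boolean =
      Try {
        val conn = new URL(s"http://$host:$port/")
          .openConnection().asInstanceOf[HttpURLConnection]
        conn.setConnectTimeout(3000)
        conn.setRequestMethod("GET")
        val ok = conn.getResponseCode == 200
        conn.disconnect()
        ok
      }.getOrElse(false)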
+IpRecommendApplication program running - 2023-07-12 09:01:02
+[2023-07-12 09:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 09:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-12 10:01:02
+[2023-07-12 10:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 10:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-12 11:01:02
+[2023-07-12 11:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 11:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-12 12:01:02
+[2023-07-12 12:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 12:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-12 13:01:02
+[2023-07-12 13:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 13:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-12 14:01:02
+[2023-07-12 14:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 14:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-12 15:01:02
+[2023-07-12 15:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 15:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-12 16:01:03
+[2023-07-12 16:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 16:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-12 17:01:02
+[2023-07-12 17:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 17:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-12 18:01:02
+[2023-07-12 18:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 18:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-12 19:01:02
+[2023-07-12 19:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 19:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-12 20:01:02
+[2023-07-12 20:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 20:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-12 21:01:02
+[2023-07-12 21:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 21:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-12 22:01:02
+[2023-07-12 22:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 22:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-12 23:01:02
+[2023-07-12 23:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 23:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 00:01:02
+[2023-07-13 00:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 00:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 01:01:02
+[2023-07-13 01:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 01:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 02:01:02
+[2023-07-13 02:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 02:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 03:01:02
+[2023-07-13 03:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 03:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 04:01:02
+[2023-07-13 04:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 04:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 05:01:02
+[2023-07-13 05:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 05:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 06:01:02
+[2023-07-13 06:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 06:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 07:01:02
+[2023-07-13 07:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 07:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 08:01:02
+[2023-07-13 08:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 08:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 09:01:02
+[2023-07-13 09:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 09:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 10:01:02
+[2023-07-13 10:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 10:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 11:01:02
+[2023-07-13 11:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 11:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 12:01:03
+[2023-07-13 12:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 12:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 13:01:02
+[2023-07-13 13:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 13:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 14:01:02
+[2023-07-13 14:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 14:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 15:01:02
+[2023-07-13 15:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 15:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 16:01:02
+[2023-07-13 16:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 16:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 17:01:03
+[2023-07-13 17:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 17:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 18:01:02
+[2023-07-13 18:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 18:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-13 19:01:02
+[2023-07-13 19:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 19:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-13 20:01:02
+[2023-07-13 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 20:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-13 21:01:02
+[2023-07-13 21:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 21:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-13 22:01:02
+[2023-07-13 22:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 22:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-13 23:01:02
+[2023-07-13 23:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 23:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 00:01:02
+[2023-07-14 00:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 00:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 01:01:02
+[2023-07-14 01:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 01:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 02:01:02
+[2023-07-14 02:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 02:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 03:01:03
+[2023-07-14 03:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 03:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 04:01:02
+[2023-07-14 04:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 04:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 05:01:02
+[2023-07-14 05:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 05:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 06:01:02
+[2023-07-14 06:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 06:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 07:01:02
+[2023-07-14 07:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 07:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 08:01:02
+[2023-07-14 08:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 08:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 09:01:02
+[2023-07-14 09:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 09:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 10:01:02
+[2023-07-14 10:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 10:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 11:01:04
+[2023-07-14 11:02:06+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 11:02:06+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 12:01:02
+[2023-07-14 12:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 12:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 13:01:02
+[2023-07-14 13:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 13:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 14:01:02
+[2023-07-14 14:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 14:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 15:01:04
+[2023-07-14 15:02:06+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 15:02:06+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 16:01:03
+[2023-07-14 16:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 16:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 17:01:03
+[2023-07-14 17:02:06+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 17:02:06+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 18:01:02
+[2023-07-14 18:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 18:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 19:01:03
+[2023-07-14 19:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 19:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 20:01:02
+[2023-07-14 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 20:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 21:01:02
+[2023-07-14 21:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 21:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 22:01:02
+[2023-07-14 22:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 22:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-14 23:01:02
+[2023-07-14 23:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 23:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 00:01:02
+[2023-07-15 00:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 00:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 01:01:02
+[2023-07-15 01:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 01:02:06+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 02:01:02
+[2023-07-15 02:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 02:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 03:01:02
+[2023-07-15 03:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 03:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 04:01:02
+[2023-07-15 04:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 04:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 05:01:02
+[2023-07-15 05:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 05:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 06:01:02
+[2023-07-15 06:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 06:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 07:01:02
+[2023-07-15 07:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 07:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 08:01:02
+[2023-07-15 08:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 08:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 09:01:02
+[2023-07-15 09:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 09:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 10:01:02
+[2023-07-15 10:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 10:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 11:01:02
+[2023-07-15 11:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 11:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 12:01:02
+[2023-07-15 12:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 12:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 13:01:02
+[2023-07-15 13:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 13:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 14:01:03
+[2023-07-15 14:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 14:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 15:01:02
+[2023-07-15 15:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 15:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 16:01:02
+[2023-07-15 16:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 16:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 17:01:02
+[2023-07-15 17:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 17:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 18:01:02
+[2023-07-15 18:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 18:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 19:01:02
+[2023-07-15 19:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 19:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 20:01:02
+[2023-07-15 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 20:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 21:01:02
+[2023-07-15 21:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 21:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 22:01:02
+[2023-07-15 22:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 22:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-15 23:01:03
+[2023-07-15 23:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 23:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-16 00:01:02
+[2023-07-16 00:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 00:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-16 01:01:02
+[2023-07-16 01:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 01:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-16 02:01:02
+[2023-07-16 02:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 02:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-16 03:01:02
+[2023-07-16 03:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 03:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-16 04:01:02
+[2023-07-16 04:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 04:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-16 05:01:02
+[2023-07-16 05:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 05:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-16 06:01:02
+[2023-07-16 06:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 06:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-16 07:01:02
+[2023-07-16 07:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 07:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-16 08:01:03
+[2023-07-16 08:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 08:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
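Editor's note: each of these hourly failures follows the same sequence. The driver cannot register with any Spark master ("All masters are unresponsive! Giving up."), the appclient-registration-retry-thread gives up roughly a minute after launch (note the :01:0x start markers and :02:0x error timestamps) and kills the application, and the SparkContext being constructed on the main thread is torn down underneath it; the NullPointerException at SparkContext.scala:567 is a symptom of that half-constructed context, not an independent bug. Below is a minimal fail-fast sketch; the SafeSparkSession name and masterUrl parameter are hypothetical, since the real SparkSessionUtil.getSparkSession (SparkSessionUtil.scala:30) is not included in this diff.

import org.apache.spark.sql.SparkSession
import scala.util.{Failure, Success, Try}

// Hypothetical wrapper: surface the master-registration failure directly instead of
// letting the half-initialized SparkContext throw a secondary NullPointerException.
object SafeSparkSession {
  def getOrCreate(appName: String, masterUrl: String): SparkSession =
    Try {
      SparkSession.builder()
        .appName(appName)
        .master(masterUrl) // make the master explicit rather than relying on defaults
        .getOrCreate()
    } match {
      case Success(spark) => spark
      case Failure(e) =>
        sys.error(s"Could not create SparkSession against $masterUrl: ${e.getMessage}")
    }
}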
+IpRecommendApplication program run - 2023-07-16 09:01:02
+[2023-07-16 09:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 09:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
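Editor's note: the IllegalArgumentException variant appears to share the same root cause. SparkContext.<init> calls MetricsSystem.getServletHandlers (SparkContext.scala:524), which requires a running MetricsSystem; when master registration fails, the appclient thread stops the context while the main thread is still constructing it, and depending on timing the main thread hits either this require failure or the NullPointerException above. Since the job is launched every hour regardless of cluster state, a pre-flight reachability probe could keep these traces out of the log. The sketch below uses plain java.net sockets; the host and port values are placeholders, as the actual master address does not appear in this log.

import java.net.{InetSocketAddress, Socket}
import scala.util.Try

// Hypothetical pre-flight check: confirm the standalone master's RPC port answers
// before attempting to build a SparkContext at all.
object MasterProbe {
  def reachable(host: String, port: Int, timeoutMs: Int = 2000): Boolean =
    Try {
      val socket = new Socket()
      try socket.connect(new InetSocketAddress(host, port), timeoutMs)
      finally socket.close()
    }.isSuccess
}

// Example (placeholder address): submit the hourly job only when
// MasterProbe.reachable("spark-master", 7077) returns true.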
+IpRecommendApplication program run - 2023-07-16 10:01:02
+[2023-07-16 10:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 10:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ ... (29 identical stack frames elided; see the full IllegalArgumentException trace above)
+IpRecommendApplication program run - 2023-07-16 11:01:03
+[2023-07-16 11:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 11:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ ... (29 identical stack frames elided; see the full IllegalArgumentException trace above)
+IpRecommendApplication program run - 2023-07-16 12:01:02
+[2023-07-16 12:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 12:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ ... (29 identical stack frames elided; see the full IllegalArgumentException trace above)
+IpRecommendApplication program run - 2023-07-16 13:01:02
+[2023-07-16 13:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 13:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ ... (27 identical stack frames elided; see the full NullPointerException trace above)
+IpRecommendApplication program run - 2023-07-16 14:01:02
+[2023-07-16 14:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 14:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ ... (27 identical stack frames elided; see the full NullPointerException trace above)
+IpRecommendApplication program run - 2023-07-16 15:01:03
+[2023-07-16 15:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 15:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ ... (29 identical stack frames elided; see the full IllegalArgumentException trace above)
+IpRecommendApplication program run - 2023-07-16 16:01:02
+[2023-07-16 16:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 16:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ ... (29 identical stack frames elided; see the full IllegalArgumentException trace above)
+IpRecommendApplication program run - 2023-07-16 17:01:02
+[2023-07-16 17:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 17:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ ... (27 identical stack frames elided; see the full NullPointerException trace above)
+IpRecommendApplication program run - 2023-07-16 18:01:02
+[2023-07-16 18:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 18:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ ... (29 identical stack frames elided; see the full IllegalArgumentException trace above)
+IpRecommendApplication program run - 2023-07-16 19:01:02
+[2023-07-16 19:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 19:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ ... (27 identical stack frames elided; see the full NullPointerException trace above)
+IpRecommendApplication program run - 2023-07-16 20:01:02
+[2023-07-16 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 20:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ ... (27 identical stack frames elided; see the full NullPointerException trace above)
+IpRecommendApplication program run - 2023-07-16 21:01:02
+[2023-07-16 21:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 21:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ ... (29 identical stack frames elided; see the full IllegalArgumentException trace above)
+IpRecommendApplication program run - 2023-07-16 22:01:02
+[2023-07-16 22:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 22:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ ... (29 identical stack frames elided; see the full IllegalArgumentException trace above)
+IpRecommendApplication program run - 2023-07-16 23:01:02
+[2023-07-16 23:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 23:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ ... (29 identical stack frames elided; see the full IllegalArgumentException trace above)
+IpRecommendApplication program run - 2023-07-17 00:01:03
+[2023-07-17 00:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 00:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ ... (29 identical stack frames elided; see the full IllegalArgumentException trace above)
+IpRecommendApplication program run - 2023-07-17 01:01:02
+[2023-07-17 01:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 01:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ ... (27 identical stack frames elided; see the full NullPointerException trace above)
+IpRecommendApplication program run - 2023-07-17 02:01:02
+[2023-07-17 02:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 02:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ ... (29 identical stack frames elided; see the full IllegalArgumentException trace above)
+IpRecommendApplication program run - 2023-07-17 03:01:02
+[2023-07-17 03:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 03:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ ... (29 identical stack frames elided; see the full IllegalArgumentException trace above)
+IpRecommendApplication program run - 2023-07-17 04:01:02
+[2023-07-17 04:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 04:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ ... (29 identical stack frames elided; see the full IllegalArgumentException trace above)
+IpRecommendApplication program run - 2023-07-17 05:01:02
+[2023-07-17 05:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 05:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-17 06:01:02
+[2023-07-17 06:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 06:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... [29 identical frames omitted; same stack trace as the 2023-07-17 04:01 run above]
+IpRecommendApplication program run - 2023-07-17 07:01:02
+[2023-07-17 07:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 07:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... [29 identical frames omitted; same stack trace as the 2023-07-17 04:01 run above]
+IpRecommendApplication program run - 2023-07-17 08:01:03
+[2023-07-17 08:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 08:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	... [26 identical frames omitted; same stack trace as the 2023-07-17 05:01 run above]
+IpRecommendApplication program run - 2023-07-17 09:01:02
+[2023-07-17 09:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 09:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	... [26 identical frames omitted; same stack trace as the 2023-07-17 05:01 run above]
+IpRecommendApplication program run - 2023-07-17 10:01:02
+[2023-07-17 10:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 10:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	... [26 identical frames omitted; same stack trace as the 2023-07-17 05:01 run above]
+IpRecommendApplication program run - 2023-07-17 11:01:02
+[2023-07-17 11:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 11:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+	at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+	... [26 identical frames omitted; same frames from SparkContext$.getOrCreate onward as the traces above]
+IpRecommendApplication program run - 2023-07-17 12:01:02
+[2023-07-17 12:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 12:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	... [26 identical frames omitted; same stack trace as the 2023-07-17 05:01 run above]
+IpRecommendApplication program run - 2023-07-17 13:01:02
+[2023-07-17 13:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 13:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	... [26 identical frames omitted; same stack trace as the 2023-07-17 05:01 run above]
+IpRecommendApplication program run - 2023-07-17 14:01:02
+[2023-07-17 14:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 14:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	... [26 identical frames omitted; same stack trace as the 2023-07-17 05:01 run above]
+IpRecommendApplication program run - 2023-07-17 15:01:02
+[2023-07-17 15:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 15:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... [29 identical frames omitted; same stack trace as the 2023-07-17 04:01 run above]
+IpRecommendApplication program run - 2023-07-17 16:01:03
+[2023-07-17 16:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 16:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... [29 identical frames omitted; same stack trace as the 2023-07-17 04:01 run above]
+IpRecommendApplication program run - 2023-07-17 17:01:02
+[2023-07-17 17:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 17:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... [29 identical frames omitted; same stack trace as the 2023-07-17 04:01 run above]
+IpRecommendApplication program run - 2023-07-17 18:01:02
+[2023-07-17 18:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 18:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	... [26 identical frames omitted; same stack trace as the 2023-07-17 05:01 run above]
+IpRecommendApplication program run - 2023-07-17 19:01:02
+[2023-07-17 19:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 19:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... [29 identical frames omitted; same stack trace as the 2023-07-17 04:01 run above]
+IpRecommendApplication program run - 2023-07-17 20:01:02
+[2023-07-17 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 20:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... [29 identical frames omitted; same stack trace as the 2023-07-17 04:01 run above]
+IpRecommendApplication program run - 2023-07-17 21:01:02
+[2023-07-17 21:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 21:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... [29 identical frames omitted; same stack trace as the 2023-07-17 04:01 run above]
+IpRecommendApplication program run - 2023-07-17 22:01:03
+[2023-07-17 22:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 22:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... [29 identical frames omitted; same stack trace as the 2023-07-17 04:01 run above]
+IpRecommendApplication program run - 2023-07-17 23:01:02
+[2023-07-17 23:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 23:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... [29 identical frames omitted; same stack trace as the 2023-07-17 04:01 run above]
+IpRecommendApplication program run - 2023-07-18 00:01:02
+[2023-07-18 00:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 00:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... [29 identical frames omitted; same stack trace as the 2023-07-17 04:01 run above]
+IpRecommendApplication program run - 2023-07-18 01:01:02
+[2023-07-18 01:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 01:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... [29 identical frames omitted; same stack trace as the 2023-07-17 04:01 run above]
+IpRecommendApplication program run - 2023-07-18 02:01:02
+[2023-07-18 02:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 02:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 03:01:02
+[2023-07-18 03:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 03:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 04:01:02
+[2023-07-18 04:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 04:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 05:01:02
+[2023-07-18 05:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 05:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 06:01:02
+[2023-07-18 06:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 06:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 07:01:02
+[2023-07-18 07:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 07:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 08:01:02
+[2023-07-18 08:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 08:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 09:01:02
+[2023-07-18 09:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 09:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 10:01:02
+[2023-07-18 10:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 10:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 11:01:02
+[2023-07-18 11:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 11:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 12:01:02
+[2023-07-18 12:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 12:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 13:01:02
+[2023-07-18 13:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 13:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 14:01:02
+[2023-07-18 14:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 14:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 15:01:02
+[2023-07-18 15:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 15:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 16:01:02
+[2023-07-18 16:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 16:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 17:01:02
+[2023-07-18 17:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 17:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 18:01:02
+[2023-07-18 18:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 18:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 19:01:02
+[2023-07-18 19:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 19:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 20:01:02
+[2023-07-18 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 20:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 21:01:02
+[2023-07-18 21:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 21:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-18 22:01:02
+[2023-07-18 22:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 22:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-18 23:01:02
+[2023-07-18 23:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 23:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 00:01:02
+[2023-07-19 00:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 00:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 01:01:02
+[2023-07-19 01:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 01:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 02:01:02
+[2023-07-19 02:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 02:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 03:01:02
+[2023-07-19 03:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 03:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 04:01:03
+[2023-07-19 04:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 04:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 05:01:02
+[2023-07-19 05:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 05:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 06:01:02
+[2023-07-19 06:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 06:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 07:01:03
+[2023-07-19 07:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 07:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 08:01:02
+[2023-07-19 08:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 08:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 09:01:02
+[2023-07-19 09:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 09:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 10:01:03
+[2023-07-19 10:02:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 10:02:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 11:01:02
+[2023-07-19 11:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 11:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 12:01:02
+[2023-07-19 12:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 12:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 13:01:02
+[2023-07-19 13:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 13:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 14:01:02
+[2023-07-19 14:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 14:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 15:01:02
+[2023-07-19 15:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 15:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 16:01:02
+[2023-07-19 16:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 16:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 17:01:02
+[2023-07-19 17:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 17:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 18:01:02
+[2023-07-19 18:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 18:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 19:01:02
+[2023-07-19 19:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 19:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 20:01:02
+[2023-07-19 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 20:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 21:01:02
+[2023-07-19 21:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 21:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 22:01:02
+[2023-07-19 22:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 22:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-19 23:01:02
+[2023-07-19 23:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 23:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-20 00:01:02
+[2023-07-20 00:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 00:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-20 01:01:02
+[2023-07-20 01:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 01:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-20 02:01:02
+[2023-07-20 02:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 02:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-20 03:01:02
+[2023-07-20 03:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 03:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-20 04:01:02
+[2023-07-20 04:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 04:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-20 05:01:02
+[2023-07-20 05:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 05:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-20 06:01:02
+[2023-07-20 06:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 06:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-20 07:01:02
+[2023-07-20 07:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 07:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-20 08:01:02
+[2023-07-20 08:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 08:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-20 09:01:02
+[2023-07-20 09:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 09:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-20 10:01:02
+[2023-07-20 10:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 10:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-20 11:01:02
+[2023-07-20 11:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 11:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-20 12:01:02
+[2023-07-20 12:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 12:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-20 13:01:02
+[2023-07-20 13:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 13:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-20 14:01:02
+[2023-07-20 14:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 14:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-20 15:01:02
+[2023-07-20 15:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 15:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-20 16:01:02
+[2023-07-20 16:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 16:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... [29 identical frames omitted; same stack trace as the first "Can only call getServletHandlers" failure above]
+IpRecommendApplication program running - 2023-07-20 17:01:03
+[2023-07-20 17:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 17:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... [29 identical frames omitted; same stack trace as the first "Can only call getServletHandlers" failure above]
+IpRecommendApplication program running - 2023-07-20 18:01:02
+[2023-07-20 18:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 18:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... [29 identical frames omitted; same stack trace as the first "Can only call getServletHandlers" failure above]
+IpRecommendApplication program running - 2023-07-20 19:01:02
+[2023-07-20 19:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 19:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... [29 identical frames omitted; same stack trace as the first "Can only call getServletHandlers" failure above]
+IpRecommendApplication program running - 2023-07-20 20:01:02
+[2023-07-20 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 20:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    ... [27 identical frames omitted; same stack trace as the earlier NullPointerException at SparkContext.scala:567]
+IpRecommendApplication program running - 2023-07-20 21:01:02
+[2023-07-20 21:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 21:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    ... [27 identical frames omitted; same stack trace as the earlier NullPointerException at SparkContext.scala:567]
+IpRecommendApplication program running - 2023-07-20 22:01:02
+[2023-07-20 22:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 22:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... [29 identical frames omitted; same stack trace as the first "Can only call getServletHandlers" failure above]
+IpRecommendApplication program running - 2023-07-20 23:01:03
+[2023-07-20 23:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 23:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... [29 identical frames omitted; same stack trace as the first "Can only call getServletHandlers" failure above]
+IpRecommendApplication program running - 2023-07-21 00:01:02
+[2023-07-21 00:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 00:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... [29 identical frames omitted; same stack trace as the first "Can only call getServletHandlers" failure above]
+IpRecommendApplication program running - 2023-07-21 01:01:02
+[2023-07-21 01:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 01:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-21 02:01:03
+[2023-07-21 02:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 02:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... [29 identical frames omitted; same stack trace as the first "Can only call getServletHandlers" failure above]
+IpRecommendApplication program running - 2023-07-21 03:01:02
+[2023-07-21 03:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 03:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... [29 identical frames omitted; same stack trace as the first "Can only call getServletHandlers" failure above]
+IpRecommendApplication program running - 2023-07-21 04:01:02
+[2023-07-21 04:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 04:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... [29 identical frames omitted; same stack trace as the first "Can only call getServletHandlers" failure above]
+IpRecommendApplication program running - 2023-07-21 05:01:03
+[2023-07-21 05:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 05:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... [29 identical frames omitted; same stack trace as the first "Can only call getServletHandlers" failure above]
+IpRecommendApplication program running - 2023-07-21 06:01:03
+[2023-07-21 06:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 06:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... [29 identical frames omitted; same stack trace as the first "Can only call getServletHandlers" failure above]
+IpRecommendApplication program running - 2023-07-21 07:01:02
+[2023-07-21 07:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 07:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... [29 identical frames omitted; same stack trace as the first "Can only call getServletHandlers" failure above]
+IpRecommendApplication program running - 2023-07-21 08:01:02
+[2023-07-21 08:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 08:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... [29 identical frames omitted; same stack trace as the first "Can only call getServletHandlers" failure above]
+IpRecommendApplication program running - 2023-07-21 09:01:02
+[2023-07-21 09:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 09:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... [29 identical frames omitted; same stack trace as the first "Can only call getServletHandlers" failure above]
+IpRecommendApplication program running - 2023-07-21 10:01:02
+[2023-07-21 10:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 10:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... [29 identical frames omitted; same stack trace as the first "Can only call getServletHandlers" failure above]
+IpRecommendApplication program running - 2023-07-21 11:01:02
+[2023-07-21 11:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 11:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    ... [27 identical frames omitted; same stack trace as the earlier NullPointerException at SparkContext.scala:567]
+IpRecommendApplication program running - 2023-07-21 12:01:03
+[2023-07-21 12:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 12:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-21 13:01:03
+[2023-07-21 13:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 13:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-21 14:01:02
+[2023-07-21 14:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 14:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-21 15:01:02
+[2023-07-21 15:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 15:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-21 16:01:02
+[2023-07-21 16:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 16:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-21 17:01:02
+[2023-07-21 17:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 17:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-21 18:01:02
+[2023-07-21 18:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 18:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-21 19:01:02
+[2023-07-21 19:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 19:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-21 20:01:02
+[2023-07-21 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 20:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-21 21:01:02
+[2023-07-21 21:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 21:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-21 22:01:02
+[2023-07-21 22:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 22:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-21 23:01:02
+[2023-07-21 23:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 23:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 00:01:02
+[2023-07-22 00:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 00:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 01:01:02
+[2023-07-22 01:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 01:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 02:01:02
+[2023-07-22 02:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 02:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 03:01:03
+[2023-07-22 03:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 03:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 04:01:02
+[2023-07-22 04:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 04:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 05:01:02
+[2023-07-22 05:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 05:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 06:01:02
+[2023-07-22 06:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 06:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 07:01:02
+[2023-07-22 07:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 07:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 08:01:02
+[2023-07-22 08:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 08:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 09:01:03
+[2023-07-22 09:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 09:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 10:01:02
+[2023-07-22 10:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 10:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 11:01:03
+[2023-07-22 11:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 11:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 12:01:02
+[2023-07-22 12:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 12:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 13:01:02
+[2023-07-22 13:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 13:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 14:01:02
+[2023-07-22 14:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 14:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 15:01:02
+[2023-07-22 15:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 15:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 16:01:02
+[2023-07-22 16:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 16:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 17:01:02
+[2023-07-22 17:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 17:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 18:01:02
+[2023-07-22 18:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 18:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 19:01:02
+[2023-07-22 19:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 19:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 20:01:02
+[2023-07-22 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 20:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 21:01:02
+[2023-07-22 21:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 21:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 22:01:02
+[2023-07-22 22:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 22:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 23:01:03
+[2023-07-22 23:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 23:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-23 00:01:02
+[2023-07-23 00:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 00:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-23 01:01:03
+[2023-07-23 01:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 01:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-23 02:01:03
+[2023-07-23 02:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 02:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-23 03:01:02
+[2023-07-23 03:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 03:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-23 04:01:02
+[2023-07-23 04:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 04:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
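[Editor's note] The hourly failures above all share one root cause: the driver never registers with the standalone master ("All masters are unresponsive! Giving up."), after which SparkContext construction aborts and surfaces as the secondary "Can only call getServletHandlers on a running MetricsSystem" requirement failure. The repository's actual SparkSessionUtil is not shown in this diff, so the following is only a minimal Scala sketch of a fail-fast session builder; SparkSessionSketch, the SPARK_MASTER environment variable, and the spark://master:7077 URL are illustrative assumptions, not code from this project.

    import org.apache.spark.sql.SparkSession

    object SparkSessionSketch {
      // Builds a session against an explicit master URL and fails fast with a
      // clear cause instead of the downstream MetricsSystem symptom logged above.
      def getSparkSession(appName: String): SparkSession =
        try {
          SparkSession.builder()
            .appName(appName)
            .master(sys.env.getOrElse("SPARK_MASTER", "spark://master:7077")) // placeholder URL
            .getOrCreate()
        } catch {
          // When all masters are unresponsive, SparkContext init aborts mid-construction;
          // wrapping the failure here keeps the root cause at the top of the log.
          case e: Exception =>
            throw new IllegalStateException(s"Could not initialize SparkContext for $appName", e)
        }
    }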
+IpRecommendApplication program run - 2023-07-23 05:01:02
+[2023-07-23 05:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 05:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-23 06:01:03
+[2023-07-23 06:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 06:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-23 07:01:02
+[2023-07-23 07:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 07:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-23 08:01:02
+[2023-07-23 08:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 08:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-23 09:01:02
+[2023-07-23 09:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 09:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-23 10:01:02
+[2023-07-23 10:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 10:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-23 11:01:02
+[2023-07-23 11:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 11:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
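[Editor's note] The NullPointerException variants in the traces above (thrown from SparkContext.scala:567 or from BlockManagerMaster.registerBlockManager) are, most likely, alternate symptoms of the same unreachable master, differing only in how far SparkContext construction got before the registration-retry thread killed the application. A hedged sketch of a pre-flight reachability probe follows; MasterProbe and the master:7077 defaults are placeholders rather than part of this deployment, and a real setup would read host and port from spark-defaults.conf.

    import java.net.{InetSocketAddress, Socket}

    object MasterProbe {
      // Probes the master's RPC port before submitting, so an unresponsive master
      // is reported directly rather than via the NPE variants logged above.
      def masterReachable(host: String = "master", port: Int = 7077, timeoutMs: Int = 3000): Boolean = {
        val socket = new Socket()
        try {
          socket.connect(new InetSocketAddress(host, port), timeoutMs)
          true
        } catch {
          case _: java.io.IOException => false
        } finally {
          socket.close()
        }
      }
    }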
+IpRecommendApplication program run - 2023-07-23 12:01:02
+[2023-07-23 12:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 12:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-23 13:01:02
+[2023-07-23 13:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 13:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-23 14:01:02
+[2023-07-23 14:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 14:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-23 15:01:02
+[2023-07-23 15:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 15:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-23 16:01:02
+[2023-07-23 16:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 16:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-23 17:01:02
+[2023-07-23 17:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 17:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-23 18:01:02
+[2023-07-23 18:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 18:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-23 19:01:02
+[2023-07-23 19:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 19:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-23 20:01:02
+[2023-07-23 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 20:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-23 21:01:02
+[2023-07-23 21:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 21:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-23 22:01:02
+[2023-07-23 22:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 22:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-23 23:01:02
+[2023-07-23 23:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 23:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-24 00:01:02
+[2023-07-24 00:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 00:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program run - 2023-07-24 01:01:02
+[2023-07-24 01:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 01:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-24 02:01:02
+[2023-07-24 02:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 02:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-24 03:01:02
+[2023-07-24 03:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 03:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-24 04:01:02
+[2023-07-24 04:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 04:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-24 05:01:02
+[2023-07-24 05:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 05:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-24 06:01:02
+[2023-07-24 06:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 06:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-24 07:01:02
+[2023-07-24 07:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 07:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-24 08:01:02
+[2023-07-24 08:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 08:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-24 09:01:02
+[2023-07-24 09:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 09:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-24 10:01:02
+[2023-07-24 10:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 10:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-24 11:01:02
+[2023-07-24 11:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 11:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
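Every failed run in the log above dies inside cn.ac.iie.utils.SparkSessionUtil$.getSparkSession: the standalone app client retries master registration, gives up ("All masters are unresponsive! Giving up."), and SparkContext construction then aborts, either at MetricsSystem.getServletHandlers (the metrics system never reached the running state) or with a NullPointerException while registering the BlockManager. A minimal Scala sketch of such a helper follows, assuming the real SparkSessionUtil is shaped roughly like this; the object name, app name, and master URL are placeholders, not taken from the repo.

import org.apache.spark.sql.SparkSession

// Hypothetical sketch of a SparkSession helper like the one the stack
// traces reference; names and the master URL are placeholders.
object SparkSessionUtilSketch {
  def getSparkSession(appName: String): SparkSession =
    SparkSession.builder()
      .appName(appName)
      // If this standalone master is unreachable, the app client retries
      // registration and then gives up ("All masters are unresponsive!"),
      // so getOrCreate() throws while the SparkContext is only half-built,
      // producing the MetricsSystem / NullPointerException errors above.
      .master("spark://master-host:7077")
      .getOrCreate()
}

Because the helper lives in a lazily initialized Scala object (note the <clinit> frames in the traces), the first ClickHouse read triggers session creation, which is why each hourly run fails at BaseClickhouseData$.initClickhouseData rather than at application start.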
diff --git a/MPE/spark/iplearning/logs/SubscriberRecommendApplication/running-2023-07.log b/MPE/spark/iplearning/logs/SubscriberRecommendApplication/running-2023-07.log
new file mode 100644
index 0000000..05eddf4
--- /dev/null
+++ b/MPE/spark/iplearning/logs/SubscriberRecommendApplication/running-2023-07.log
@@ -0,0 +1,9673 @@
+SubscriberRecommendApplication program running - 2023-07-12 04:42:37
+[2023-07-12 04:43:39+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 04:43:39+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-12 05:02:05
+[2023-07-12 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 05:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-12 06:02:05
+[2023-07-12 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 06:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-12 07:02:05
+[2023-07-12 07:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 07:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-12 08:02:05
+[2023-07-12 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 08:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-12 09:02:05
+[2023-07-12 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 09:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-12 10:02:05
+[2023-07-12 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 10:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-12 11:02:05
+[2023-07-12 11:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 11:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-12 12:02:05
+[2023-07-12 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 12:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-12 13:02:05
+[2023-07-12 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 13:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
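The pair of ERROR lines in each run records the real failure: the job launches at HH:02:05, the standalone client spends about a minute failing to register with any master, and the application is killed. Only then does the `SparkContext` constructor blow up, so the `IllegalArgumentException` from `MetricsSystem.getServletHandlers` is a downstream symptom: the `require(running)` guard trips because the metrics system was stopped (or never started) when the half-built context was torn down. Below is a minimal sketch of a session helper in the spirit of `cn.ac.iie.utils.SparkSessionUtil.getSparkSession`; its source is not part of this diff, so the body is an assumption about how such a helper could surface the configured master URL instead of the misleading metrics error.

```scala
// A sketch only: the real cn.ac.iie.utils.SparkSessionUtil is not in this diff,
// so this body is an assumption about what such a helper could do.
import org.apache.spark.sql.SparkSession
import scala.util.{Failure, Success, Try}

object SparkSessionSketch {
  def getSparkSession(appName: String, master: String): SparkSession =
    Try(
      SparkSession.builder()
        .appName(appName)
        .master(master) // e.g. "spark://host:7077" for a standalone master
        .getOrCreate()
    ) match {
      case Success(spark) => spark
      case Failure(e) =>
        // With all masters unresponsive, the context is torn down before the
        // metrics system is running, so the surfaced exception is misleading;
        // logging the configured master URL points at the actual problem.
        System.err.println(s"SparkContext init failed against master=$master: ${e.getMessage}")
        throw e
    }
}
```

Rethrowing keeps the wrapper script's exit-code handling intact; only the diagnostics change.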
+SubscriberRecommendApplication program run - 2023-07-12 14:02:05
+[2023-07-12 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 14:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-12 15:02:05
+[2023-07-12 15:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 15:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-12 16:02:05
+[2023-07-12 16:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 16:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-12 17:02:05
+[2023-07-12 17:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 17:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-12 18:02:05
+[2023-07-12 18:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 18:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-12 19:02:05
+[2023-07-12 19:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 19:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
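Every entry follows the same cadence: launch at HH:02:05 (evidently an hourly cron), give-up at roughly HH:03:07. The gap of about 62 seconds is consistent with the standalone client's default registration policy of three attempts with a 20-second timeout each before it declares all masters unresponsive. Since each attempt burns a minute and fails identically, a cheap pre-flight probe of the master's web UI can let the wrapper skip doomed submissions; the sketch below assumes the standalone UI is on its default port 8080 and serves cluster state at `/json`, which reports `ALIVE` for a healthy master.

```scala
// Pre-flight probe of a standalone master's web UI. Assumptions: the UI is
// reachable at uiUrl (port 8080 by default) and its /json endpoint reports
// a status of ALIVE for a healthy master; both are Spark defaults, not facts
// confirmed by this repository's configs.
import scala.io.Source
import scala.util.Try

object MasterProbe {
  def masterAlive(uiUrl: String): Boolean =
    Try {
      val src = Source.fromURL(s"$uiUrl/json", "UTF-8")
      try src.mkString.contains("ALIVE") // crude, but enough for a go/no-go check
      finally src.close()
    }.getOrElse(false)
}

// Hypothetical usage in the hourly wrapper, before spark-submit:
//   if (!MasterProbe.masterAlive("http://spark-master:8080")) sys.exit(1)
```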
+SubscriberRecommendApplication program run - 2023-07-12 20:02:05
+[2023-07-12 20:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 20:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-12 21:02:05
+[2023-07-12 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 21:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
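From the 21:02 run onward the log alternates between this `NullPointerException` (thrown from a later line of the `SparkContext` constructor, 567 rather than 524) and the earlier `MetricsSystem` error. Both are plausibly the same race: the application-killed handler tears the context down on another thread while the constructor is still running, and which field the main thread trips over depends on timing. If the hourly schedule must be kept, a bounded retry with backoff around session creation (a sketch with illustrative names only) at least separates a transient master blip from the persistent outage this log records:

```scala
// A sketch with illustrative names: bounded retry with exponential backoff
// around session creation. Not taken from the application's source.
import org.apache.spark.sql.SparkSession

object RetrySketch {
  def withRetries(attempts: Int, delayMs: Long)(build: => SparkSession): SparkSession =
    try build
    catch {
      case e: Exception if attempts > 1 =>
        // Covers both failure shapes seen above (IllegalArgumentException and
        // NullPointerException); after the last attempt the exception propagates.
        System.err.println(
          s"Session init failed with ${e.getClass.getSimpleName}; retrying in $delayMs ms")
        Thread.sleep(delayMs)
        withRetries(attempts - 1, delayMs * 2)(build)
    }
}

// Hypothetical usage:
//   RetrySketch.withRetries(3, 30000L) {
//     SparkSession.builder().appName("SubscriberRecommendApplication").getOrCreate()
//   }
```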
+SubscriberRecommendApplication program run - 2023-07-12 22:02:05
+[2023-07-12 22:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 22:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-12 23:02:05
+[2023-07-12 23:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 23:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-13 00:02:05
+[2023-07-13 00:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 00:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-13 01:02:05
+[2023-07-13 01:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 01:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-13 02:02:05
+[2023-07-13 02:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 02:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-13 03:02:05
+[2023-07-13 03:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 03:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-13 04:02:05
+[2023-07-13 04:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 04:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-13 05:02:05
+[2023-07-13 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 05:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-13 06:02:05
+[2023-07-13 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 06:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-13 07:02:05
+[2023-07-13 07:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 07:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-13 08:02:05
+[2023-07-13 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 08:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-13 09:02:05
+[2023-07-13 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 09:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-13 10:02:05
+[2023-07-13 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 10:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-13 11:02:05
+[2023-07-13 11:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 11:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-13 12:02:05
+[2023-07-13 12:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 12:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-13 13:02:05
+[2023-07-13 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 13:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-13 14:02:05
+[2023-07-13 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 14:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-13 15:02:05
+[2023-07-13 15:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 15:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-13 16:02:05
+[2023-07-13 16:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 16:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-13 17:02:05
+[2023-07-13 17:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 17:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-13 18:02:04
+[2023-07-13 18:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 18:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-13 19:02:05
+[2023-07-13 19:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 19:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-13 20:02:05
+[2023-07-13 20:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 20:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-13 21:02:05
+[2023-07-13 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 21:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-13 22:02:05
+[2023-07-13 22:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 22:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-13 23:02:05
+[2023-07-13 23:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 23:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-14 00:02:05
+[2023-07-14 00:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 00:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-14 01:02:05
+[2023-07-14 01:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 01:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-14 02:02:05
+[2023-07-14 02:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 02:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-14 03:02:06
+[2023-07-14 03:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 03:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-14 04:02:05
+[2023-07-14 04:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 04:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication 程序运行 - 2023-07-14 05:02:05
+[2023-07-14 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 05:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
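Every hourly run from here on dies the same way: the standalone master never acknowledges the registration attempts, the appclient retry thread gives up, and the half-built SparkContext then asks its MetricsSystem for servlet handlers before that system was ever started. The "requirement failed" message in the trace comes from a plain require(running, ...) guard. A minimal standalone sketch of that pattern (illustrative Scala only, not Spark's actual MetricsSystem source):

    object MetricsGuardSketch {
      // Model of the guard behind "Can only call getServletHandlers on a
      // running MetricsSystem": the accessor checks a `running` flag that is
      // only set once start() succeeds, so a context torn down mid-construction
      // trips the require(). Illustrative; not the Spark source.
      final class MetricsLike {
        private var running = false

        def start(masterReachable: Boolean): Unit = {
          require(masterReachable, "all masters are unresponsive")
          running = true
        }

        def getServletHandlers: Seq[String] = {
          require(running, "Can only call getServletHandlers on a running MetricsSystem")
          Seq("/metrics/json")
        }
      }

      def main(args: Array[String]): Unit = {
        val m = new MetricsLike
        try m.start(masterReachable = false)
        catch { case e: IllegalArgumentException => println(s"startup failed: ${e.getMessage}") }
        m.getServletHandlers // throws IllegalArgumentException, mirroring the trace above
      }
    }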
+SubscriberRecommendApplication program running - 2023-07-14 06:02:05
+[2023-07-14 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 06:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
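The occasional NullPointerException at SparkContext.scala:567 is most likely the same outage surfacing a few lines later in the constructor, where a field that successful master registration should have populated is still null. Since cn.ac.iie.utils.SparkSessionUtil.getSparkSession (its source is not in this repo) apparently builds the session once and lets the exception propagate, a hypothetical bounded-retry wrapper such as the sketch below could avoid burning the whole hourly slot on a single attempt; the helper name and 30s back-off are made up for illustration:

    import org.apache.spark.sql.SparkSession

    import scala.annotation.tailrec
    import scala.util.{Failure, Success, Try}

    // Hypothetical helper, not part of cn.ac.iie.utils.SparkSessionUtil:
    // retry SparkSession creation a few times with a fixed back-off, so one
    // transient master outage does not consume the entire scheduled run.
    object SessionWithRetry {
      @tailrec
      def getOrCreate(master: String, app: String, attempts: Int = 3): SparkSession =
        Try(SparkSession.builder().master(master).appName(app).getOrCreate()) match {
          case Success(session) => session
          case Failure(_) if attempts > 1 =>
            Thread.sleep(30000L) // assumed 30s pause between attempts
            getOrCreate(master, app, attempts - 1)
          case Failure(e) => throw e
        }
    }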
+SubscriberRecommendApplication program running - 2023-07-14 07:02:05
+[2023-07-14 07:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 07:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-14 08:02:05
+[2023-07-14 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 08:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-14 09:02:05
+[2023-07-14 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 09:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-14 10:02:05
+[2023-07-14 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 10:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-14 11:02:06
+[2023-07-14 11:03:09+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 11:03:09+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-14 12:02:05
+[2023-07-14 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 12:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-14 13:02:05
+[2023-07-14 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 13:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-14 14:02:05
+[2023-07-14 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 14:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-14 15:02:06
+[2023-07-14 15:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 15:03:09+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-14 16:02:05
+[2023-07-14 16:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 16:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-14 17:02:07
+[2023-07-14 17:03:09+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 17:03:10+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-14 18:02:05
+[2023-07-14 18:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 18:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-14 19:02:06
+[2023-07-14 19:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 19:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-14 20:02:05
+[2023-07-14 20:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 20:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-14 21:02:05
+[2023-07-14 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 21:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-14 22:02:05
+[2023-07-14 22:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 22:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-14 23:02:05
+[2023-07-14 23:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 23:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-15 00:02:05
+[2023-07-15 00:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 00:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+org.apache.spark.SparkException: Exception thrown in awaitResult:
+ at org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:205)
+ at org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)
+ at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:92)
+ at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:76)
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+Caused by: org.apache.spark.SparkException: Could not find BlockManagerMaster.
+ at org.apache.spark.rpc.netty.Dispatcher.postMessage(Dispatcher.scala:157)
+ at org.apache.spark.rpc.netty.Dispatcher.postLocalMessage(Dispatcher.scala:132)
+ at org.apache.spark.rpc.netty.NettyRpcEnv.ask(NettyRpcEnv.scala:228)
+ at org.apache.spark.rpc.netty.NettyRpcEndpointRef.ask(NettyRpcEnv.scala:522)
+ at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:91)
+ ... 30 more
+SubscriberRecommendApplication program run - 2023-07-15 01:02:06
+[2023-07-15 01:03:10+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 01:03:10+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 02:02:05
+[2023-07-15 02:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 02:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 03:02:05
+[2023-07-15 03:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 03:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 04:02:05
+[2023-07-15 04:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 04:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 05:02:05
+[2023-07-15 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 05:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 06:02:05
+[2023-07-15 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 06:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 07:02:05
+[2023-07-15 07:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 07:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 08:02:05
+[2023-07-15 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 08:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 09:02:05
+[2023-07-15 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 09:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 10:02:05
+[2023-07-15 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 10:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 11:02:05
+[2023-07-15 11:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 11:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 12:02:05
+[2023-07-15 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 12:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 13:02:05
+[2023-07-15 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 13:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 14:02:05
+[2023-07-15 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 14:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 15:02:05
+[2023-07-15 15:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 15:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 16:02:05
+[2023-07-15 16:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 16:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 17:02:05
+[2023-07-15 17:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 17:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 18:02:05
+[2023-07-15 18:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 18:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 19:02:05
+[2023-07-15 19:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 19:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 20:02:05
+[2023-07-15 20:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 20:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 21:02:05
+[2023-07-15 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 21:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 22:02:05
+[2023-07-15 22:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 22:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-15 23:02:06
+[2023-07-15 23:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 23:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
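Editor's note: the `NullPointerException` variants in this log (here at `BlockManagerMaster.registerBlockManager`, elsewhere at `SparkContext.<init>(SparkContext.scala:567)`) are consistent with the same root cause as the MetricsSystem failures: once the retry thread reports `All masters are unresponsive`, the application is killed and the context is torn down on another thread while the constructor is still running, so whichever initialization step executes next dereferences an already-cleared field. The sketch after the first trace above applies here as well; only the point at which the half-initialized context fails differs from run to run.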
+SubscriberRecommendApplication program run - 2023-07-16 00:02:05
+[2023-07-16 00:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 00:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-16 01:02:05
+[2023-07-16 01:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 01:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-16 02:02:05
+[2023-07-16 02:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 02:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-16 03:02:05
+[2023-07-16 03:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 03:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-16 04:02:05
+[2023-07-16 04:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 04:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-16 05:02:05
+[2023-07-16 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 05:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-16 06:02:05
+[2023-07-16 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 06:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-16 07:02:05
+[2023-07-16 07:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 07:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-16 08:02:06
+[2023-07-16 08:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 08:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-16 09:02:05
+[2023-07-16 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 09:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-16 10:02:05
+[2023-07-16 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 10:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-16 11:02:06
+[2023-07-16 11:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 11:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-16 12:02:05
+[2023-07-16 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 12:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-16 13:02:05
+[2023-07-16 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 13:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-16 14:02:05
+[2023-07-16 14:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 14:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-16 15:02:05
+[2023-07-16 15:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 15:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-16 16:02:05
+[2023-07-16 16:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 16:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
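The entry above shows the failure pattern that repeats hourly through the rest of this log: the app client's registration-retry thread gives up ("All masters are unresponsive! Giving up."), the half-constructed SparkContext is torn down, and the constructor then dies with a NullPointerException at SparkContext.scala:567. The NPE is a symptom; the root cause is that the driver never manages to register with a standalone master. Below is a minimal, hedged sketch of a fail-fast guard around session creation; it is not the project's SparkSessionUtil, and the master URL and app name shown are placeholder assumptions, not values recovered from this log.

import org.apache.spark.sql.SparkSession
import scala.util.{Failure, Success, Try}

object SparkSessionGuard {
  // Hedged sketch, not the project's SparkSessionUtil. The master URL and
  // app name are placeholders (assumptions), not values from this log.
  def getOrFail(master: String, appName: String): SparkSession =
    Try {
      SparkSession.builder()
        .master(master)          // e.g. "spark://master1:7077,master2:7077"
        .appName(appName)
        .getOrCreate()
    } match {
      case Success(spark) => spark
      case Failure(e) =>
        // Fail fast with the root cause instead of leaving an hourly NPE
        // buried in the log.
        sys.error(s"SparkContext init failed - is the master reachable? ${e.getMessage}")
    }
}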
+SubscriberRecommendApplication program running - 2023-07-16 17:02:05
+[2023-07-16 17:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 17:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-16 18:02:05
+[2023-07-16 18:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 18:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
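From here on the same outage also surfaces as `java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem`. This is most likely the same root cause surfacing one step earlier in SparkContext construction: with master registration failed, the MetricsSystem is never started, so the `require` guard in `getServletHandlers` trips; whether a given hourly run reports the NPE or this requirement failure appears to depend only on where construction happens to be when the retries give up. One cheap way to report the real problem directly is to probe the master's RPC port before building the session. A hedged sketch follows; the hostname is a placeholder assumption, and 7077 is only the standalone master's default port.

import java.net.{InetSocketAddress, Socket}
import scala.util.Try

object MasterProbe {
  // Hedged sketch: host and port are placeholder assumptions; 7077 is only
  // the standalone master's default RPC port.
  def reachable(host: String, port: Int, timeoutMs: Int = 3000): Boolean =
    Try {
      val sock = new Socket()
      try sock.connect(new InetSocketAddress(host, port), timeoutMs)
      finally sock.close()
    }.isSuccess

  def main(args: Array[String]): Unit =
    if (reachable("spark-master", 7077)) // placeholder hostname
      println("master reachable; safe to build the SparkSession")
    else
      println("master unreachable: check the Master daemon and the spark.master URL before submitting")
}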
+SubscriberRecommendApplication program running - 2023-07-16 19:02:05
+[2023-07-16 19:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 19:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-16 20:02:05
+[2023-07-16 20:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 20:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-16 21:02:05
+[2023-07-16 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 21:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-16 22:02:05
+[2023-07-16 22:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 22:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-16 23:02:05
+[2023-07-16 23:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 23:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 00:02:05
+[2023-07-17 00:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 00:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 01:02:05
+[2023-07-17 01:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 01:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 02:02:05
+[2023-07-17 02:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 02:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 03:02:05
+[2023-07-17 03:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 03:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 04:02:05
+[2023-07-17 04:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 04:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 05:02:05
+[2023-07-17 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 05:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 06:02:05
+[2023-07-17 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 06:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 07:02:05
+[2023-07-17 07:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 07:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 08:02:06
+[2023-07-17 08:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 08:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 09:02:05
+[2023-07-17 09:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 09:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 10:02:05
+[2023-07-17 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 10:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 11:02:05
+[2023-07-17 11:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 11:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 12:02:05
+[2023-07-17 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 12:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 13:02:06
+[2023-07-17 13:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 13:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 14:02:05
+[2023-07-17 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 14:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 15:02:05
+[2023-07-17 15:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 15:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 16:02:05
+[2023-07-17 16:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 16:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 17:02:05
+[2023-07-17 17:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 17:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 18:02:05
+[2023-07-17 18:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 18:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 19:02:05
+[2023-07-17 19:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 19:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 20:02:05
+[2023-07-17 20:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 20:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 21:02:05
+[2023-07-17 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 21:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 22:02:05
+[2023-07-17 22:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 22:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 23:02:05
+[2023-07-17 23:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 23:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-18 00:02:05
+[2023-07-18 00:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 00:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-18 01:02:05
+[2023-07-18 01:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 01:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-18 02:02:05
+[2023-07-18 02:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 02:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-18 03:02:05
+[2023-07-18 03:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 03:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-18 04:02:05
+[2023-07-18 04:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 04:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-18 05:02:05
+[2023-07-18 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 05:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-18 06:02:05
+[2023-07-18 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 06:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-18 07:02:05
+[2023-07-18 07:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 07:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-18 08:02:05
+[2023-07-18 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 08:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-18 09:02:05
+[2023-07-18 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 09:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-18 10:02:05
+[2023-07-18 10:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 10:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-18 11:02:05
+[2023-07-18 11:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 11:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-18 12:02:05
+[2023-07-18 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 12:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-18 13:02:05
+[2023-07-18 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 13:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-18 14:02:05
+[2023-07-18 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 14:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-18 15:02:04
+[2023-07-18 15:03:06+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 15:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-18 16:02:05
+[2023-07-18 16:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 16:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-18 17:02:05
+[2023-07-18 17:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 17:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-18 18:02:05
+[2023-07-18 18:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 18:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-18 19:02:05
+[2023-07-18 19:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 19:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-18 20:02:05
+[2023-07-18 20:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 20:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-18 21:02:05
+[2023-07-18 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 21:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-18 22:02:05
+[2023-07-18 22:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 22:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-18 23:02:05
+[2023-07-18 23:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 23:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 00:02:05
+[2023-07-19 00:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 00:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 01:02:05
+[2023-07-19 01:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 01:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 02:02:05
+[2023-07-19 02:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 02:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 03:02:05
+[2023-07-19 03:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 03:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 04:02:05
+[2023-07-19 04:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 04:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
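[Editor's note] Every hourly run below follows the same pattern: registration with the standalone master times out ("All masters are unresponsive! Giving up."), after which SparkContext construction dies part-way through and surfaces either as a NullPointerException or as "Can only call getServletHandlers on a running MetricsSystem". The source of cn.ac.iie.utils.SparkSessionUtil is not part of this log, so the following is only a minimal, hypothetical Scala sketch of a fail-fast wrapper that would replace those downstream errors with a diagnostic naming the likely root cause; the master URL and app name are illustrative assumptions.

import org.apache.spark.sql.SparkSession
import scala.util.{Failure, Success, Try}

// Hypothetical sketch - not the actual SparkSessionUtil from this deployment.
object SparkSessionGuard {
  def getOrFailFast(master: String, appName: String): SparkSession =
    Try {
      SparkSession.builder()
        .master(master)          // e.g. "spark://master-host:7077" (assumed)
        .appName(appName)
        .getOrCreate()
    } match {
      case Success(spark) => spark
      case Failure(e) =>
        // When all masters are unresponsive, SparkContext init aborts midway;
        // later accesses then fail as NullPointerException or the MetricsSystem
        // requirement error seen in this log. Re-throw with a clearer message.
        throw new IllegalStateException(
          s"Could not create SparkSession against $master; " +
            "check that the standalone master is up and reachable", e)
    }
}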
+SubscriberRecommendApplication program run - 2023-07-19 05:02:05
+[2023-07-19 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 05:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 06:02:05
+[2023-07-19 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 06:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 07:02:06
+[2023-07-19 07:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 07:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 08:02:05
+[2023-07-19 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 08:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 09:02:05
+[2023-07-19 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 09:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 10:02:07
+[2023-07-19 10:03:10+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 10:03:10+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 11:02:05
+[2023-07-19 11:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 11:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 12:02:05
+[2023-07-19 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 12:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 13:02:05
+[2023-07-19 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 13:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 14:02:05
+[2023-07-19 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 14:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 15:02:05
+[2023-07-19 15:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 15:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 16:02:05
+[2023-07-19 16:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 16:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 17:02:05
+[2023-07-19 17:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 17:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 18:02:05
+[2023-07-19 18:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 18:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 19:02:05
+[2023-07-19 19:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 19:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 20:02:05
+[2023-07-19 20:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 20:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 21:02:05
+[2023-07-19 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 21:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 22:02:05
+[2023-07-19 22:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 22:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-19 23:02:05
+[2023-07-19 23:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 23:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-20 00:02:05
+[2023-07-20 00:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 00:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 01:02:05
+[2023-07-20 01:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 01:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 02:02:05
+[2023-07-20 02:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 02:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 03:02:05
+[2023-07-20 03:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 03:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 04:02:05
+[2023-07-20 04:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 04:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 05:02:05
+[2023-07-20 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 05:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 06:02:05
+[2023-07-20 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 06:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 07:02:05
+[2023-07-20 07:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 07:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 08:02:05
+[2023-07-20 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 08:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 09:02:05
+[2023-07-20 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 09:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 10:02:05
+[2023-07-20 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 10:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 11:02:05
+[2023-07-20 11:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 11:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 12:02:05
+[2023-07-20 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 12:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 13:02:05
+[2023-07-20 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 13:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 14:02:05
+[2023-07-20 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 14:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 15:02:05
+[2023-07-20 15:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 15:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 16:02:05
+[2023-07-20 16:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 16:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 17:02:05
+[2023-07-20 17:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 17:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 18:02:05
+[2023-07-20 18:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 18:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 19:02:05
+[2023-07-20 19:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 19:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 20:02:05
+[2023-07-20 20:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 20:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 21:02:05
+[2023-07-20 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 21:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 22:02:05
+[2023-07-20 22:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 22:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-20 23:02:05
+[2023-07-20 23:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 23:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 00:02:05
+[2023-07-21 00:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 00:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 01:02:05
+[2023-07-21 01:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 01:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 02:02:05
+[2023-07-21 02:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 02:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 03:02:05
+[2023-07-21 03:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 03:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 04:02:05
+[2023-07-21 04:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 04:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 05:02:05
+[2023-07-21 05:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 05:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 06:02:05
+[2023-07-21 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 06:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 07:02:05
+[2023-07-21 07:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 07:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 08:02:05
+[2023-07-21 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 08:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 09:02:05
+[2023-07-21 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 09:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 10:02:05
+[2023-07-21 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 10:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 11:02:05
+[2023-07-21 11:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 11:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 12:02:05
+[2023-07-21 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 12:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 13:02:05
+[2023-07-21 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 13:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 14:02:05
+[2023-07-21 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 14:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 15:02:05
+[2023-07-21 15:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 15:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 16:02:05
+[2023-07-21 16:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 16:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
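[Editor's note] Every hourly run in this log fails the same way: the driver launches at HH:02, cannot register with any standalone master, and the appclient-registration-retry-thread gives up roughly a minute later (HH:03), killing the application and stopping the half-built SparkContext; the pending require(running) check in MetricsSystem.getServletHandlers then throws. The sketch below reconstructs the presumed entry point from the stack trace alone; the contents of SparkSessionUtil and the master URL are assumptions, not the project's actual source.

    import org.apache.spark.sql.SparkSession

    // Hypothetical reconstruction of cn.ac.iie.utils.SparkSessionUtil, inferred
    // only from the stack trace above; the real implementation is not in this diff.
    object SparkSessionUtil {
      // getOrCreate blocks while the driver registers with the standalone master.
      // When no master answers, the registration retry thread kills the app and
      // this call dies mid-initialization with the MetricsSystem require failure.
      def getSparkSession: SparkSession = SparkSession.builder()
        .appName("SubscriberRecommendApplication")
        .master("spark://master-host:7077") // placeholder; the real URL would come from spark-defaults.conf
        .getOrCreate()
    }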
+SubscriberRecommendApplication program run - 2023-07-21 17:02:05
+[2023-07-21 17:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 17:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... 29 identical frames omitted (same stack trace as the 2023-07-21 16:03:07 entry above)
+SubscriberRecommendApplication program run - 2023-07-21 18:02:05
+[2023-07-21 18:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 18:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... 29 identical frames omitted (same stack trace as the 2023-07-21 16:03:07 entry above)
+SubscriberRecommendApplication program run - 2023-07-21 19:02:05
+[2023-07-21 19:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 19:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... 29 identical frames omitted (same stack trace as the 2023-07-21 16:03:07 entry above)
+SubscriberRecommendApplication program run - 2023-07-21 20:02:05
+[2023-07-21 20:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 20:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... 29 identical frames omitted (same stack trace as the 2023-07-21 16:03:07 entry above)
+SubscriberRecommendApplication program run - 2023-07-21 21:02:05
+[2023-07-21 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 21:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
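[Editor's note] The NullPointerException entries (SparkContext.scala:567) appear to be the same failure racing slightly differently: the retry thread stops the context while the constructor is still running, so a later initialization step dereferences state that was already torn down instead of tripping the MetricsSystem require. If the job must keep polling an unreliable master, a thin retry wrapper at least yields one clear error per run; this is a hedged sketch, where the helper, retry count, and delay are illustrative and not from the source:

    import scala.util.{Failure, Success, Try}
    import org.apache.spark.sql.SparkSession

    // Hypothetical guard around session creation; not part of the original code.
    object SessionRetry {
      def sessionWithRetry(attempts: Int = 3, delayMs: Long = 30000L): SparkSession =
        Try(SparkSessionUtil.getSparkSession) match {
          case Success(spark) => spark
          case Failure(e) if attempts > 1 =>
            System.err.println(s"SparkSession init failed (${e.getClass.getSimpleName}), retrying: ${e.getMessage}")
            Thread.sleep(delayMs)
            sessionWithRetry(attempts - 1, delayMs)
          case Failure(e) => throw e
        }
    }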
+SubscriberRecommendApplication program run - 2023-07-21 22:02:05
+[2023-07-21 22:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 22:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	... 27 identical frames omitted (same stack trace as the 2023-07-21 21:03:08 entry above)
+SubscriberRecommendApplication program run - 2023-07-21 23:02:05
+[2023-07-21 23:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-21 23:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... 29 identical frames omitted (same stack trace as the 2023-07-21 16:03:07 entry above)
+SubscriberRecommendApplication program run - 2023-07-22 00:02:05
+[2023-07-22 00:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 00:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... 29 identical frames omitted (same stack trace as the 2023-07-21 16:03:07 entry above)
+SubscriberRecommendApplication program run - 2023-07-22 01:02:05
+[2023-07-22 01:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 01:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... 29 identical frames omitted (same stack trace as the 2023-07-21 16:03:07 entry above)
+SubscriberRecommendApplication program run - 2023-07-22 02:02:05
+[2023-07-22 02:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 02:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... 29 identical frames omitted (same stack trace as the 2023-07-21 16:03:07 entry above)
+SubscriberRecommendApplication program run - 2023-07-22 03:02:05
+[2023-07-22 03:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 03:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... 29 identical frames omitted (same stack trace as the 2023-07-21 16:03:07 entry above)
+SubscriberRecommendApplication program run - 2023-07-22 04:02:06
+[2023-07-22 04:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 04:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... 29 identical frames omitted (same stack trace as the 2023-07-21 16:03:07 entry above)
+SubscriberRecommendApplication program run - 2023-07-22 05:02:05
+[2023-07-22 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 05:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... 29 identical frames omitted (same stack trace as the 2023-07-21 16:03:07 entry above)
+SubscriberRecommendApplication program run - 2023-07-22 06:02:05
+[2023-07-22 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 06:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... 29 identical frames omitted (same stack trace as the 2023-07-21 16:03:07 entry above)
+SubscriberRecommendApplication program run - 2023-07-22 07:02:05
+[2023-07-22 07:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 07:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... 29 identical frames omitted (same stack trace as the 2023-07-21 16:03:07 entry above)
+SubscriberRecommendApplication program run - 2023-07-22 08:02:05
+[2023-07-22 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 08:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... 29 identical frames omitted (same stack trace as the 2023-07-21 16:03:07 entry above)
+SubscriberRecommendApplication program run - 2023-07-22 09:02:05
+[2023-07-22 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 09:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	... 27 identical frames omitted (same stack trace as the 2023-07-21 21:03:08 entry above)
+SubscriberRecommendApplication program run - 2023-07-22 10:02:05
+[2023-07-22 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 10:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... 29 identical frames omitted (same stack trace as the 2023-07-21 16:03:07 entry above)
+SubscriberRecommendApplication program run - 2023-07-22 11:02:06
+[2023-07-22 11:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 11:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... 29 identical frames omitted (same stack trace as the 2023-07-21 16:03:07 entry above)
+SubscriberRecommendApplication program run - 2023-07-22 12:02:05
+[2023-07-22 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 12:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... 29 identical frames omitted (same stack trace as the 2023-07-21 16:03:07 entry above)
+SubscriberRecommendApplication program run - 2023-07-22 13:02:05
+[2023-07-22 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 13:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-22 14:02:05
+[2023-07-22 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 14:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-22 15:02:05
+[2023-07-22 15:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 15:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-22 16:02:05
+[2023-07-22 16:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 16:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-22 17:02:05
+[2023-07-22 17:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 17:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-22 18:02:05
+[2023-07-22 18:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 18:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-22 19:02:05
+[2023-07-22 19:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 19:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-22 20:02:05
+[2023-07-22 20:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 20:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-22 21:02:05
+[2023-07-22 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 21:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-22 22:02:05
+[2023-07-22 22:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 22:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-22 23:02:05
+[2023-07-22 23:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 23:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-23 00:02:05
+[2023-07-23 00:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 00:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-23 01:02:06
+[2023-07-23 01:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 01:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-23 02:02:05
+[2023-07-23 02:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 02:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-23 03:02:05
+[2023-07-23 03:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 03:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-23 04:02:05
+[2023-07-23 04:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 04:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-23 05:02:05
+[2023-07-23 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 05:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-23 06:02:05
+[2023-07-23 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 06:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-23 07:02:05
+[2023-07-23 07:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 07:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program run - 2023-07-23 08:02:05
+[2023-07-23 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 08:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
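Every hourly run in this log fails the same way: the driver cannot register with the standalone master within the retry window ("All masters are unresponsive! Giving up."), the half-initialized SparkContext is killed from the appclient-registration-retry-thread, and the main thread then trips over the dead context, usually as the MetricsSystem require failure seen above. The entry point in the trace is cn.ac.iie.utils.SparkSessionUtil.getSparkSession; its source is not included in this tree, but from the trace it is presumably a thin wrapper over the standard SparkSession builder, roughly as sketched here (names and options are assumptions, not the actual code):

    // Hedged sketch of cn.ac.iie.utils.SparkSessionUtil -- the real source is
    // not in this dump; only the call path in the stack trace is known.
    import org.apache.spark.sql.SparkSession

    object SparkSessionUtil {
      // getOrCreate constructs the SparkContext, which must register with the
      // standalone master. When registration times out, the retry thread stops
      // the half-built context and getOrCreate throws on the main thread.
      lazy val spark: SparkSession = SparkSession
        .builder()
        .appName("SubscriberRecommendApplication") // assumed app name
        .getOrCreate()

      def getSparkSession: SparkSession = spark
    }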
+SubscriberRecommendApplication program run - 2023-07-23 09:02:05
+[2023-07-23 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 09:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (identical stack frames omitted; see the first full trace above)
+SubscriberRecommendApplication program run - 2023-07-23 10:02:05
+[2023-07-23 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 10:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (identical stack frames omitted; see the first full trace above)
+SubscriberRecommendApplication program run - 2023-07-23 11:02:05
+[2023-07-23 11:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 11:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (identical stack frames omitted; see the first full trace above)
+SubscriberRecommendApplication program run - 2023-07-23 12:02:05
+[2023-07-23 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 12:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (identical stack frames omitted; see the first full trace above)
+SubscriberRecommendApplication program run - 2023-07-23 13:02:05
+[2023-07-23 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 13:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (identical stack frames omitted; see the first full trace above)
+SubscriberRecommendApplication program run - 2023-07-23 14:02:06
+[2023-07-23 14:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 14:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (identical stack frames omitted; see the first full trace above)
+SubscriberRecommendApplication program run - 2023-07-23 15:02:05
+[2023-07-23 15:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 15:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
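The java.lang.NullPointerException at SparkContext.scala:567 in this run (and in a few later ones) appears to be the same master-registration failure surfacing at a different point: the retry thread tears the context down while the constructor is still running, so whether the main thread sees the MetricsSystem IllegalArgumentException or an NPE likely depends on how far initialization got before the kill.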
+SubscriberRecommendApplication program run - 2023-07-23 16:02:05
+[2023-07-23 16:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 16:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (identical stack frames omitted; see the first full trace above)
+SubscriberRecommendApplication program run - 2023-07-23 17:02:05
+[2023-07-23 17:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 17:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (identical stack frames omitted; see the first full trace above)
+SubscriberRecommendApplication program run - 2023-07-23 18:02:05
+[2023-07-23 18:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 18:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (identical stack frames omitted; see the first full trace above)
+SubscriberRecommendApplication program run - 2023-07-23 19:02:05
+[2023-07-23 19:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 19:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (identical stack frames omitted; see the first full trace above)
+SubscriberRecommendApplication program run - 2023-07-23 20:02:05
+[2023-07-23 20:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 20:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (identical stack frames omitted; see the first full trace above)
+SubscriberRecommendApplication program run - 2023-07-23 21:02:05
+[2023-07-23 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 21:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	... (identical stack frames omitted; see the first full NullPointerException trace above)
+SubscriberRecommendApplication program run - 2023-07-23 22:02:05
+[2023-07-23 22:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 22:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (identical stack frames omitted; see the first full trace above)
+SubscriberRecommendApplication program run - 2023-07-23 23:02:06
+[2023-07-23 23:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 23:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	... (identical stack frames omitted; see the first full NullPointerException trace above)
+SubscriberRecommendApplication program run - 2023-07-24 00:02:05
+[2023-07-24 00:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 00:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (identical stack frames omitted; see the first full trace above)
+SubscriberRecommendApplication program run - 2023-07-24 01:02:05
+[2023-07-24 01:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 01:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (identical stack frames omitted; see the first full trace above)
+SubscriberRecommendApplication program run - 2023-07-24 02:02:05
+[2023-07-24 02:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 02:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (identical stack frames omitted; see the first full trace above)
+SubscriberRecommendApplication program run - 2023-07-24 03:02:05
+[2023-07-24 03:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 03:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (identical stack frames omitted; see the first full trace above)
+SubscriberRecommendApplication program run - 2023-07-24 04:02:05
+[2023-07-24 04:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 04:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (identical stack frames omitted; see the first full trace above)
+SubscriberRecommendApplication program running - 2023-07-24 05:02:05
+[2023-07-24 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 05:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program running - 2023-07-24 06:02:05
+[2023-07-24 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 06:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program running - 2023-07-24 07:02:05
+[2023-07-24 07:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 07:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program running - 2023-07-24 08:02:05
+[2023-07-24 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 08:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-24 09:02:06
+[2023-07-24 09:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 09:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program running - 2023-07-24 10:02:05
+[2023-07-24 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 10:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program running - 2023-07-24 11:02:05
+[2023-07-24 11:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-24 11:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
diff --git a/MPE/spark/iplearning/runnum b/MPE/spark/iplearning/runnum
new file mode 100644
index 0000000..d00491f
--- /dev/null
+++ b/MPE/spark/iplearning/runnum
@@ -0,0 +1 @@
+1
diff --git a/MPE/spark/iplearning/upconfig.sh b/MPE/spark/iplearning/upconfig.sh
new file mode 100644
index 0000000..1527ffb
--- /dev/null
+++ b/MPE/spark/iplearning/upconfig.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+source /etc/profile
+
+jar_name="ip-learning-spark.jar"
+
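+# `jar -uvf` replaces the application.properties entry inside the jar in place,
+# so the next Spark run picks up the edited config without rebuilding the jar.
+# Optional verification step (not part of the original script, shown only as a
+# suggested check): jar -tf "$jar_name" | grep application.properties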
+jar -uvf "$jar_name" application.properties
diff --git a/MPE/spark/sbin/dae-sparkall.sh b/MPE/spark/sbin/dae-sparkall.sh
new file mode 100644
index 0000000..178561c
--- /dev/null
+++ b/MPE/spark/sbin/dae-sparkall.sh
@@ -0,0 +1,54 @@
+#!/bin/bash
+source /etc/profile
+
+BASE_DIR=/data/tsg/olap
+
+VERSION=spark-2.2.3-bin-hadoop2.7
+
+function set_log(){
+RES_SUM_FILE=$BASE_DIR/$VERSION/logs
+
+# The logs path is a directory and each counter is a file, so test with -d and -f.
+if [ ! -d "$RES_SUM_FILE" ]
+then
+ mkdir -p $RES_SUM_FILE
+fi
+
+if [ ! -f "$RES_SUM_FILE/$1" ];then
+ echo "0" > $RES_SUM_FILE/$1
+fi
+
+OLD_NUM=`cat $RES_SUM_FILE/$1`
+RESTART_NUM=`expr $OLD_NUM + 1`
+echo $RESTART_NUM > $RES_SUM_FILE/$1
+if [ $OLD_NUM -eq "0" ];then
+ echo "`date "+%Y-%m-%d %H:%M:%S"` - Spark $2 service initial start" >> $BASE_DIR/$VERSION/logs/restart.log
+else
+ echo "`date "+%Y-%m-%d %H:%M:%S"` - Spark $2 service down - restart count -> $RESTART_NUM." >> $BASE_DIR/$VERSION/logs/restart.log
+fi
+}
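+# set_log keeps one counter file per daemon under logs/ (maRes_sum, hiRes_sum,
+# woRes_sum) and appends a line to logs/restart.log on every restart, e.g.
+# (illustrative entry): 2023-07-24 04:03:07 - Spark Master service down - restart count -> 3.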
+
+
+while true ; do
+
+HAS_MA=`jps -l | grep -w "org.apache.spark.deploy.master.Master" | grep -v grep |wc -l`
+HAS_HI=`jps -l | grep -w "org.apache.spark.deploy.history.HistoryServer" | grep -v grep |wc -l`
+HAS_WO=`jps -l | grep -w "org.apache.spark.deploy.worker.Worker" | grep -v grep |wc -l`
+
+
+if [ $HAS_MA -eq "0" ];then
+ $BASE_DIR/$VERSION/sbin/start-master.sh
+ set_log maRes_sum Master
+fi
+
+if [ $HAS_HI -eq "0" ];then
+ $BASE_DIR/$VERSION/sbin/start-history-server.sh
+ set_log hiRes_sum HistoryServer
+fi
+
+if [ $HAS_WO -eq "0" ];then
+ $BASE_DIR/$VERSION/sbin/start-slave.sh spark://192.168.20.223:7077
+ set_log woRes_sum Worker
+fi
+
+sleep 60
+done
diff --git a/MPE/spark/sbin/set_spark_env.sh b/MPE/spark/sbin/set_spark_env.sh
new file mode 100644
index 0000000..3a351b2
--- /dev/null
+++ b/MPE/spark/sbin/set_spark_env.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+
+echo -e "\n#spark\nexport SPARK_HOME=/data/tsg/olap/spark-2.2.3-bin-hadoop2.7\nexport PATH=\$SPARK_HOME/sbin:\$PATH\nexport PATH=\$SPARK_HOME/bin:\$PATH" >> /etc/profile.d/spark.sh
+chmod +x /etc/profile.d/spark.sh
+source /etc/profile
+
+keeppath='/etc/init.d/keepsparkall'
+if [ -x $keeppath ];then
+ chkconfig --add keepsparkall
+ chkconfig keepsparkall on
+ service keepsparkall start && sleep 5
+ all_dae=`ps -ef | grep dae-sparkall.sh | grep -v grep | wc -l`
+ if [ $all_dae -eq "0" ];then
+ nohup /data/tsg/olap/spark-2.2.3-bin-hadoop2.7/sbin/dae-sparkall.sh > /dev/null 2>&1 &
+ fi
+fi
+
+keeppath='/etc/init.d/keepsparkmaster'
+if [ -x $keeppath ];then
+ chkconfig --add keepsparkmaster
+ chkconfig keepsparkmaster on
+ service keepsparkmaster start && sleep 5
+ master_dae=`ps -ef | grep dae-sparkmaster.sh | grep -v grep | wc -l`
+ if [ $master_dae -eq "0" ];then
+ nohup /data/tsg/olap/spark-2.2.3-bin-hadoop2.7/sbin/dae-sparkmaster.sh > /dev/null 2>&1 &
+ fi
+fi
+
+keeppath='/etc/init.d/keepsparkworker'
+if [ -x $keeppath ];then
+ chkconfig --add keepsparkworker
+ chkconfig keepsparkworker on
+ service keepsparkworker start && sleep 5
+ worker_dae=`ps -ef | grep dae-sparkworker.sh | grep -v grep | wc -l`
+ if [ $worker_dae -eq "0" ];then
+ nohup /data/tsg/olap/spark-2.2.3-bin-hadoop2.7/sbin/dae-sparkworker.sh > /dev/null 2>&1 &
+ fi
+fi
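+# The three blocks above differ only in the service name; an equivalent compact
+# form (a sketch, assuming the same init scripts and daemon paths) would be:
+#   for svc in all master worker; do
+#     if [ -x "/etc/init.d/keepspark${svc}" ]; then
+#       chkconfig --add "keepspark${svc}" && chkconfig "keepspark${svc}" on
+#       service "keepspark${svc}" start
+#     fi
+#   done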
+
diff --git a/MPE/spark/sbin/slaves.sh b/MPE/spark/sbin/slaves.sh
new file mode 100644
index 0000000..c971aa3
--- /dev/null
+++ b/MPE/spark/sbin/slaves.sh
@@ -0,0 +1,103 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Run a shell command on all slave hosts.
+#
+# Environment Variables
+#
+# SPARK_SLAVES File naming remote hosts.
+# Default is ${SPARK_CONF_DIR}/slaves.
+# SPARK_CONF_DIR Alternate conf dir. Default is ${SPARK_HOME}/conf.
+# SPARK_SLAVE_SLEEP Seconds to sleep between spawning remote commands.
+# SPARK_SSH_OPTS Options passed to ssh when running remote commands.
+##
+
+usage="Usage: slaves.sh [--config <conf-dir>] command..."
+
+# if no args specified, show usage
+if [ $# -le 0 ]; then
+ echo $usage
+ exit 1
+fi
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+# If the slaves file is specified in the command line,
+# then it takes precedence over the definition in
+# spark-env.sh. Save it here.
+if [ -f "$SPARK_SLAVES" ]; then
+ HOSTLIST=`cat "$SPARK_SLAVES"`
+fi
+
+# Check if --config is passed as an argument. It is an optional parameter.
+# Exit if the argument is not a directory.
+if [ "$1" == "--config" ]
+then
+ shift
+ conf_dir="$1"
+ if [ ! -d "$conf_dir" ]
+ then
+ echo "ERROR : $conf_dir is not a directory"
+ echo $usage
+ exit 1
+ else
+ export SPARK_CONF_DIR="$conf_dir"
+ fi
+ shift
+fi
+
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+if [ "$HOSTLIST" = "" ]; then
+ if [ "$SPARK_SLAVES" = "" ]; then
+ if [ -f "${SPARK_CONF_DIR}/slaves" ]; then
+ HOSTLIST=`cat "${SPARK_CONF_DIR}/slaves"`
+ else
+ HOSTLIST=localhost
+ fi
+ else
+ HOSTLIST=`cat "${SPARK_SLAVES}"`
+ fi
+fi
+
+
+
+# By default disable strict host key checking
+if [ "$SPARK_SSH_OPTS" = "" ]; then
+ SPARK_SSH_OPTS="-o StrictHostKeyChecking=no"
+fi
+
+for slave in `echo "$HOSTLIST"|sed "s/#.*$//;/^$/d"`; do
+ if [ -n "${SPARK_SSH_FOREGROUND}" ]; then
+ ssh $SPARK_SSH_OPTS "$slave" $"${@// /\\ }" \
+ 2>&1 | sed "s/^/$slave: /"
+ else
+ ssh $SPARK_SSH_OPTS "$slave" $"${@// /\\ }" \
+ 2>&1 | sed "s/^/$slave: /" &
+ fi
+ if [ "$SPARK_SLAVE_SLEEP" != "" ]; then
+ sleep $SPARK_SLAVE_SLEEP
+ fi
+done
+
+wait
diff --git a/MPE/spark/sbin/spark-config.sh b/MPE/spark/sbin/spark-config.sh
new file mode 100644
index 0000000..bf3da18
--- /dev/null
+++ b/MPE/spark/sbin/spark-config.sh
@@ -0,0 +1,33 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# included in all the spark scripts with source command
+# should not be executable directly
+# also should not be passed any arguments, since we need original $*
+
+# symlink and absolute path should rely on SPARK_HOME to resolve
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+export SPARK_CONF_DIR="${SPARK_CONF_DIR:-"${SPARK_HOME}/conf"}"
+# Add the PySpark classes to the PYTHONPATH:
+if [ -z "${PYSPARK_PYTHONPATH_SET}" ]; then
+ export PYTHONPATH="${SPARK_HOME}/python:${PYTHONPATH}"
+ export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.10.7-src.zip:${PYTHONPATH}"
+ export PYSPARK_PYTHONPATH_SET=1
+fi
diff --git a/MPE/spark/sbin/spark-daemon.sh b/MPE/spark/sbin/spark-daemon.sh
new file mode 100644
index 0000000..c227c98
--- /dev/null
+++ b/MPE/spark/sbin/spark-daemon.sh
@@ -0,0 +1,242 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Runs a Spark command as a daemon.
+#
+# Environment Variables
+#
+# SPARK_CONF_DIR Alternate conf dir. Default is ${SPARK_HOME}/conf.
+# SPARK_LOG_DIR Where log files are stored. ${SPARK_HOME}/logs by default.
+# SPARK_MASTER host:path where spark code should be rsync'd from
+# SPARK_PID_DIR The pid files are stored. /tmp by default.
+# SPARK_IDENT_STRING A string representing this instance of spark. $USER by default
+# SPARK_NICENESS The scheduling priority for daemons. Defaults to 0.
+# SPARK_NO_DAEMONIZE If set, will run the proposed command in the foreground. It will not output a PID file.
+##
+
+usage="Usage: spark-daemon.sh [--config <conf-dir>] (start|stop|submit|status) <spark-command> <spark-instance-number> <args...>"
+
+# if no args specified, show usage
+if [ $# -le 1 ]; then
+ echo $usage
+ exit 1
+fi
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+# get arguments
+
+# Check if --config is passed as an argument. It is an optional parameter.
+# Exit if the argument is not a directory.
+
+if [ "$1" == "--config" ]
+then
+ shift
+ conf_dir="$1"
+ if [ ! -d "$conf_dir" ]
+ then
+ echo "ERROR : $conf_dir is not a directory"
+ echo $usage
+ exit 1
+ else
+ export SPARK_CONF_DIR="$conf_dir"
+ fi
+ shift
+fi
+
+option=$1
+shift
+command=$1
+shift
+instance=$1
+shift
+
+spark_rotate_log ()
+{
+ log=$1;
+ num=5;
+ if [ -n "$2" ]; then
+ num=$2
+ fi
+ if [ -f "$log" ]; then # rotate logs
+ while [ $num -gt 1 ]; do
+ prev=`expr $num - 1`
+ [ -f "$log.$prev" ] && mv "$log.$prev" "$log.$num"
+ num=$prev
+ done
+ mv "$log" "$log.$num";
+ fi
+}
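+# Rotation example with the default num=5: out.4 -> out.5, ..., out.1 -> out.2,
+# then the live log becomes out.1, so the five most recent runs are retained.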
+
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+if [ "$SPARK_IDENT_STRING" = "" ]; then
+ export SPARK_IDENT_STRING="$USER"
+fi
+
+
+export SPARK_PRINT_LAUNCH_COMMAND="1"
+
+# get log directory
+if [ "$SPARK_LOG_DIR" = "" ]; then
+ export SPARK_LOG_DIR="${SPARK_HOME}/logs"
+fi
+mkdir -p "$SPARK_LOG_DIR"
+touch "$SPARK_LOG_DIR"/.spark_test > /dev/null 2>&1
+TEST_LOG_DIR=$?
+if [ "${TEST_LOG_DIR}" = "0" ]; then
+ rm -f "$SPARK_LOG_DIR"/.spark_test
+else
+ chown "$SPARK_IDENT_STRING" "$SPARK_LOG_DIR"
+fi
+
+if [ "$SPARK_PID_DIR" = "" ]; then
+ SPARK_PID_DIR=/tmp
+fi
+
+# some variables
+log="$SPARK_LOG_DIR/spark-$SPARK_IDENT_STRING-$command-$instance-$HOSTNAME.out"
+pid="$SPARK_PID_DIR/spark-$SPARK_IDENT_STRING-$command-$instance.pid"
+
+# Set default scheduling priority
+if [ "$SPARK_NICENESS" = "" ]; then
+ export SPARK_NICENESS=0
+fi
+
+execute_command() {
+ if [ -z ${SPARK_NO_DAEMONIZE+set} ]; then
+ nohup -- "$@" >> $log 2>&1 < /dev/null &
+ newpid="$!"
+
+ echo "$newpid" > "$pid"
+
+ # Poll for up to 5 seconds for the java process to start
+ for i in {1..10}
+ do
+ if [[ $(ps -p "$newpid" -o comm=) =~ "java" ]]; then
+ break
+ fi
+ sleep 0.5
+ done
+
+ sleep 2
+ # Check if the process has died; in that case we'll tail the log so the user can see
+ if [[ ! $(ps -p "$newpid" -o comm=) =~ "java" ]]; then
+ echo "failed to launch: $@"
+ tail -2 "$log" | sed 's/^/ /'
+ echo "full log in $log"
+ fi
+ else
+ "$@"
+ fi
+}
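+# With SPARK_NO_DAEMONIZE set, the command runs in the foreground and no PID
+# file is written (see the header above). Example invocation (assumed usage):
+#   SPARK_NO_DAEMONIZE=1 sbin/spark-daemon.sh start org.apache.spark.deploy.master.Master 1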
+
+run_command() {
+ mode="$1"
+ shift
+
+ mkdir -p "$SPARK_PID_DIR"
+
+ if [ -f "$pid" ]; then
+ TARGET_ID="$(cat "$pid")"
+ if [[ $(ps -p "$TARGET_ID" -o comm=) =~ "java" ]]; then
+ echo "$command running as process $TARGET_ID. Stop it first."
+ exit 1
+ fi
+ fi
+
+ if [ "$SPARK_MASTER" != "" ]; then
+ echo rsync from "$SPARK_MASTER"
+ rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' "$SPARK_MASTER/" "${SPARK_HOME}"
+ fi
+
+ spark_rotate_log "$log"
+ echo "starting $command, logging to $log"
+
+ case "$mode" in
+ (class)
+ execute_command nice -n "$SPARK_NICENESS" "${SPARK_HOME}"/bin/spark-class "$command" "$@"
+ ;;
+
+ (submit)
+ execute_command nice -n "$SPARK_NICENESS" bash "${SPARK_HOME}"/bin/spark-submit --class "$command" "$@"
+ ;;
+
+ (*)
+ echo "unknown mode: $mode"
+ exit 1
+ ;;
+ esac
+
+}
+
+case $option in
+
+ (submit)
+ run_command submit "$@"
+ ;;
+
+ (start)
+ run_command class "$@"
+ ;;
+
+ (stop)
+
+ if [ -f $pid ]; then
+ TARGET_ID="$(cat "$pid")"
+ if [[ $(ps -p "$TARGET_ID" -o comm=) =~ "java" ]]; then
+ echo "stopping $command"
+ kill "$TARGET_ID" && rm -f "$pid"
+ else
+ echo "no $command to stop"
+ fi
+ else
+ echo "no $command to stop"
+ fi
+ ;;
+
+ (status)
+
+ if [ -f $pid ]; then
+ TARGET_ID="$(cat "$pid")"
+ if [[ $(ps -p "$TARGET_ID" -o comm=) =~ "java" ]]; then
+ echo $command is running.
+ exit 0
+ else
+ echo $pid file is present but $command not running
+ exit 1
+ fi
+ else
+ echo $command not running.
+ exit 2
+ fi
+ ;;
+
+ (*)
+ echo $usage
+ exit 1
+ ;;
+
+esac
+
+
diff --git a/MPE/spark/sbin/spark-daemons.sh b/MPE/spark/sbin/spark-daemons.sh
new file mode 100644
index 0000000..dec2f44
--- /dev/null
+++ b/MPE/spark/sbin/spark-daemons.sh
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Run a Spark command on all slave hosts.
+
+usage="Usage: spark-daemons.sh [--config <conf-dir>] [start|stop] command instance-number args..."
+
+# if no args specified, show usage
+if [ $# -le 1 ]; then
+ echo $usage
+ exit 1
+fi
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+exec "${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/sbin/spark-daemon.sh" "$@"
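+# Example (assuming hosts listed in conf/slaves and this cluster's master URL):
+#   sbin/spark-daemons.sh start org.apache.spark.deploy.worker.Worker 1 spark://192.168.20.223:7077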
diff --git a/MPE/spark/sbin/start-all.sh b/MPE/spark/sbin/start-all.sh
new file mode 100644
index 0000000..a5d30d2
--- /dev/null
+++ b/MPE/spark/sbin/start-all.sh
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Start all spark daemons.
+# Starts the master on this node.
+# Starts a worker on each node specified in conf/slaves
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+# Load the Spark configuration
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+# Start Master
+"${SPARK_HOME}/sbin"/start-master.sh
+
+# Start Workers
+"${SPARK_HOME}/sbin"/start-slaves.sh
diff --git a/MPE/spark/sbin/start-history-server.sh b/MPE/spark/sbin/start-history-server.sh
new file mode 100644
index 0000000..38a43b9
--- /dev/null
+++ b/MPE/spark/sbin/start-history-server.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Starts the history server on the machine this script is executed on.
+#
+# Usage: start-history-server.sh
+#
+# Use the SPARK_HISTORY_OPTS environment variable to set history server configuration.
+#
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+exec "${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.history.HistoryServer 1 "$@"
diff --git a/MPE/spark/sbin/start-master.sh b/MPE/spark/sbin/start-master.sh
new file mode 100644
index 0000000..97ee321
--- /dev/null
+++ b/MPE/spark/sbin/start-master.sh
@@ -0,0 +1,67 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Starts the master on the machine this script is executed on.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+# NOTE: This exact class name is matched downstream by SparkSubmit.
+# Any changes need to be reflected there.
+CLASS="org.apache.spark.deploy.master.Master"
+
+if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
+ echo "Usage: ./sbin/start-master.sh [options]"
+ pattern="Usage:"
+ pattern+="\|Using Spark's default log4j profile:"
+ pattern+="\|Registered signal handlers for"
+
+ "${SPARK_HOME}"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
+ exit 1
+fi
+
+ORIGINAL_ARGS="$@"
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+if [ "$SPARK_MASTER_PORT" = "" ]; then
+ SPARK_MASTER_PORT=7077
+fi
+
+if [ "$SPARK_MASTER_HOST" = "" ]; then
+ case `uname` in
+ (SunOS)
+ SPARK_MASTER_HOST="`/usr/sbin/check-hostname | awk '{print $NF}'`"
+ ;;
+ (*)
+ SPARK_MASTER_HOST="`hostname -f`"
+ ;;
+ esac
+fi
+
+if [ "$SPARK_MASTER_WEBUI_PORT" = "" ]; then
+ SPARK_MASTER_WEBUI_PORT=8080
+fi
+
+"${SPARK_HOME}/sbin"/spark-daemon.sh start $CLASS 1 \
+ --host $SPARK_MASTER_HOST --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT \
+ $ORIGINAL_ARGS
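+# Example (assumed address, matching the master URL used by dae-sparkall.sh):
+#   SPARK_MASTER_HOST=192.168.20.223 ./sbin/start-master.sh
+# starts the master on port 7077 with the web UI on 8080, per the defaults above.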
diff --git a/MPE/spark/sbin/start-mesos-dispatcher.sh b/MPE/spark/sbin/start-mesos-dispatcher.sh
new file mode 100644
index 0000000..ecaad7a
--- /dev/null
+++ b/MPE/spark/sbin/start-mesos-dispatcher.sh
@@ -0,0 +1,51 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Starts the Mesos Cluster Dispatcher on the machine this script is executed on.
+# The Mesos Cluster Dispatcher is responsible for launching the Mesos framework and
+# Rest server to handle driver requests for Mesos cluster mode.
+# Only one cluster dispatcher is needed per Mesos cluster.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+if [ "$SPARK_MESOS_DISPATCHER_PORT" = "" ]; then
+ SPARK_MESOS_DISPATCHER_PORT=7077
+fi
+
+if [ "$SPARK_MESOS_DISPATCHER_HOST" = "" ]; then
+ case `uname` in
+ (SunOS)
+ SPARK_MESOS_DISPATCHER_HOST="`/usr/sbin/check-hostname | awk '{print $NF}'`"
+ ;;
+ (*)
+ SPARK_MESOS_DISPATCHER_HOST="`hostname -f`"
+ ;;
+ esac
+fi
+
+if [ "$SPARK_MESOS_DISPATCHER_NUM" = "" ]; then
+ SPARK_MESOS_DISPATCHER_NUM=1
+fi
+
+"${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.mesos.MesosClusterDispatcher $SPARK_MESOS_DISPATCHER_NUM --host $SPARK_MESOS_DISPATCHER_HOST --port $SPARK_MESOS_DISPATCHER_PORT "$@"
diff --git a/MPE/spark/sbin/start-mesos-shuffle-service.sh b/MPE/spark/sbin/start-mesos-shuffle-service.sh
new file mode 100644
index 0000000..1845845
--- /dev/null
+++ b/MPE/spark/sbin/start-mesos-shuffle-service.sh
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Starts the Mesos external shuffle server on the machine this script is executed on.
+# The Mesos external shuffle service detects when an application exits and automatically
+# cleans up its shuffle files.
+#
+# Usage: start-mesos-shuffle-server.sh
+#
+# Use the SPARK_SHUFFLE_OPTS environment variable to set shuffle service configuration.
+#
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+exec "${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.mesos.MesosExternalShuffleService 1
diff --git a/MPE/spark/sbin/start-shuffle-service.sh b/MPE/spark/sbin/start-shuffle-service.sh
new file mode 100644
index 0000000..793e165
--- /dev/null
+++ b/MPE/spark/sbin/start-shuffle-service.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Starts the external shuffle server on the machine this script is executed on.
+#
+# Usage: start-shuffle-server.sh
+#
+# Use the SPARK_SHUFFLE_OPTS environment variable to set shuffle server configuration.
+#
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+exec "${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.ExternalShuffleService 1
diff --git a/MPE/spark/sbin/start-slave.sh b/MPE/spark/sbin/start-slave.sh
new file mode 100644
index 0000000..a22c00d
--- /dev/null
+++ b/MPE/spark/sbin/start-slave.sh
@@ -0,0 +1,91 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Starts a slave on the machine this script is executed on.
+#
+# Environment Variables
+#
+# SPARK_WORKER_INSTANCES The number of worker instances to run on this
+# slave. Default is 1.
+# SPARK_WORKER_PORT The base port number for the first worker. If set,
+# subsequent workers will increment this number. If
+# unset, Spark will find a valid port number, but
+# with no guarantee of a predictable pattern.
+# SPARK_WORKER_WEBUI_PORT The base port for the web interface of the first
+# worker. Subsequent workers will increment this
+#                           number. Default here is 7081 (set below; stock
+#                           Spark uses 8081).
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+# NOTE: This exact class name is matched downstream by SparkSubmit.
+# Any changes need to be reflected there.
+CLASS="org.apache.spark.deploy.worker.Worker"
+
+if [[ $# -lt 1 ]] || [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
+ echo "Usage: ./sbin/start-slave.sh [options] <master>"
+ pattern="Usage:"
+ pattern+="\|Using Spark's default log4j profile:"
+ pattern+="\|Registered signal handlers for"
+
+ "${SPARK_HOME}"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
+ exit 1
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+# First argument should be the master; we need to store it aside because we may
+# need to insert arguments between it and the other arguments
+MASTER=$1
+shift
+
+# Determine desired worker port
+if [ "$SPARK_WORKER_WEBUI_PORT" = "" ]; then
+ SPARK_WORKER_WEBUI_PORT=7081
+fi
+
+# Start up the appropriate number of workers on this machine.
+# quick local function to start a worker
+function start_instance {
+ WORKER_NUM=$1
+ shift
+
+ if [ "$SPARK_WORKER_PORT" = "" ]; then
+ PORT_FLAG=
+ PORT_NUM=
+ else
+ PORT_FLAG="--port"
+ PORT_NUM=$(( $SPARK_WORKER_PORT + $WORKER_NUM - 1 ))
+ fi
+ WEBUI_PORT=$(( $SPARK_WORKER_WEBUI_PORT + $WORKER_NUM - 1 ))
+
+ "${SPARK_HOME}/sbin"/spark-daemon.sh start $CLASS $WORKER_NUM \
+ --webui-port "$WEBUI_PORT" $PORT_FLAG $PORT_NUM $MASTER "$@"
+}
+
+if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
+ start_instance 1 "$@"
+else
+ for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
+ start_instance $(( 1 + $i )) "$@"
+ done
+fi
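+# Example: SPARK_WORKER_INSTANCES=2 ./sbin/start-slave.sh spark://192.168.20.223:7077
+# starts workers 1 and 2 with web UIs on ports 7081 and 7082, per the defaults above.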
diff --git a/MPE/spark/sbin/start-slaves.sh b/MPE/spark/sbin/start-slaves.sh
new file mode 100644
index 0000000..f5269df
--- /dev/null
+++ b/MPE/spark/sbin/start-slaves.sh
@@ -0,0 +1,46 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Starts a slave instance on each machine specified in the conf/slaves file.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+# Find the port number for the master
+if [ "$SPARK_MASTER_PORT" = "" ]; then
+ SPARK_MASTER_PORT=7077
+fi
+
+if [ "$SPARK_MASTER_HOST" = "" ]; then
+ case `uname` in
+ (SunOS)
+ SPARK_MASTER_HOST="`/usr/sbin/check-hostname | awk '{print $NF}'`"
+ ;;
+ (*)
+ SPARK_MASTER_HOST="`hostname -f`"
+ ;;
+ esac
+fi
+
+# Launch the slaves
+"${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/sbin/start-slave.sh" "spark://$SPARK_MASTER_HOST:$SPARK_MASTER_PORT"
diff --git a/MPE/spark/sbin/start-thriftserver.sh b/MPE/spark/sbin/start-thriftserver.sh
new file mode 100644
index 0000000..f02f317
--- /dev/null
+++ b/MPE/spark/sbin/start-thriftserver.sh
@@ -0,0 +1,56 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Shell script for starting the Spark SQL Thrift server
+
+# Enter posix mode for bash
+set -o posix
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+# NOTE: This exact class name is matched downstream by SparkSubmit.
+# Any changes need to be reflected there.
+CLASS="org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
+
+function usage {
+ echo "Usage: ./sbin/start-thriftserver [options] [thrift server options]"
+ pattern="usage"
+ pattern+="\|Spark assembly has been built with Hive"
+ pattern+="\|NOTE: SPARK_PREPEND_CLASSES is set"
+ pattern+="\|Spark Command: "
+ pattern+="\|======="
+ pattern+="\|--help"
+
+ "${SPARK_HOME}"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
+ echo
+ echo "Thrift server options:"
+ "${SPARK_HOME}"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
+}
+
+if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
+ usage
+ exit 0
+fi
+
+export SUBMIT_USAGE_FUNCTION=usage
+
+exec "${SPARK_HOME}"/sbin/spark-daemon.sh submit $CLASS 1 --name "Thrift JDBC/ODBC Server" "$@"
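+# Example (standard Spark usage; port and master URL are illustrative):
+#   ./sbin/start-thriftserver.sh --hiveconf hive.server2.thrift.port=10000 \
+#     --master spark://192.168.20.223:7077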
diff --git a/MPE/spark/sbin/stop-all.sh b/MPE/spark/sbin/stop-all.sh
new file mode 100644
index 0000000..4e476ca
--- /dev/null
+++ b/MPE/spark/sbin/stop-all.sh
@@ -0,0 +1,49 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Stop all spark daemons.
+# Run this on the master node.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+# Load the Spark configuration
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+# Stop the slaves, then the master
+"${SPARK_HOME}/sbin"/stop-slaves.sh
+"${SPARK_HOME}/sbin"/stop-master.sh
+
+if [ "$1" == "--wait" ]
+then
+ printf "Waiting for workers to shut down..."
+ while true
+ do
+ running=`${SPARK_HOME}/sbin/slaves.sh ps -ef | grep -v grep | grep deploy.worker.Worker`
+ if [ -z "$running" ]
+ then
+ printf "\nAll workers successfully shut down.\n"
+ break
+ else
+ printf "."
+ sleep 10
+ fi
+ done
+fi
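+# With --wait, the script polls every 10s via slaves.sh until no
+# deploy.worker.Worker process remains on any slave, so it only returns once
+# every remote worker has actually exited.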
diff --git a/MPE/spark/sbin/stop-history-server.sh b/MPE/spark/sbin/stop-history-server.sh
new file mode 100644
index 0000000..14e3af4
--- /dev/null
+++ b/MPE/spark/sbin/stop-history-server.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Stops the history server on the machine this script is executed on.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+"${SPARK_HOME}/sbin/spark-daemon.sh" stop org.apache.spark.deploy.history.HistoryServer 1
diff --git a/MPE/spark/sbin/stop-master.sh b/MPE/spark/sbin/stop-master.sh
new file mode 100644
index 0000000..14644ea
--- /dev/null
+++ b/MPE/spark/sbin/stop-master.sh
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Stops the master on the machine this script is executed on.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1
diff --git a/MPE/spark/sbin/stop-mesos-dispatcher.sh b/MPE/spark/sbin/stop-mesos-dispatcher.sh
new file mode 100644
index 0000000..b13e018
--- /dev/null
+++ b/MPE/spark/sbin/stop-mesos-dispatcher.sh
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Stop the Mesos Cluster dispatcher on the machine this script is executed on.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
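+# SPARK_MESOS_DISPATCHER_NUM is the instance number to stop; it defaults to 1
+# and should match the number the dispatcher was started with.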
+if [ "$SPARK_MESOS_DISPATCHER_NUM" = "" ]; then
+ SPARK_MESOS_DISPATCHER_NUM=1
+fi
+
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.mesos.MesosClusterDispatcher \
+ $SPARK_MESOS_DISPATCHER_NUM
+
diff --git a/MPE/spark/sbin/stop-mesos-shuffle-service.sh b/MPE/spark/sbin/stop-mesos-shuffle-service.sh
new file mode 100644
index 0000000..d23cad3
--- /dev/null
+++ b/MPE/spark/sbin/stop-mesos-shuffle-service.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Stops the Mesos external shuffle service on the machine this script is executed on.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.mesos.MesosExternalShuffleService 1
diff --git a/MPE/spark/sbin/stop-shuffle-service.sh b/MPE/spark/sbin/stop-shuffle-service.sh
new file mode 100644
index 0000000..50d69cf
--- /dev/null
+++ b/MPE/spark/sbin/stop-shuffle-service.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Stops the external shuffle service on the machine this script is executed on.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.ExternalShuffleService 1
diff --git a/MPE/spark/sbin/stop-slave.sh b/MPE/spark/sbin/stop-slave.sh
new file mode 100644
index 0000000..685bcf5
--- /dev/null
+++ b/MPE/spark/sbin/stop-slave.sh
@@ -0,0 +1,45 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# A shell script to stop all workers on a single slave
+#
+# Environment variables
+#
+# SPARK_WORKER_INSTANCES The number of worker instances that should be
+# running on this slave. Default is 1.
+
+# Usage: stop-slave.sh
+# Stops all workers on the machine this script is executed on.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
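+# Worker instances are numbered from 1; stop each instance that was started.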
+if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
+ "${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker 1
+else
+ for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
+ "${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
+ done
+fi
diff --git a/MPE/spark/sbin/stop-slaves.sh b/MPE/spark/sbin/stop-slaves.sh
new file mode 100644
index 0000000..a57441b
--- /dev/null
+++ b/MPE/spark/sbin/stop-slaves.sh
@@ -0,0 +1,30 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
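+# slaves.sh runs the given command on every host listed in conf/slaves; the
+# escaped ';' separates the remote cd from the stop-slave.sh invocation.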
+"${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/sbin"/stop-slave.sh
diff --git a/MPE/spark/sbin/stop-thriftserver.sh b/MPE/spark/sbin/stop-thriftserver.sh
new file mode 100644
index 0000000..cf45058
--- /dev/null
+++ b/MPE/spark/sbin/stop-thriftserver.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Stops the thrift server on the machine this script is executed on.
+
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.sql.hive.thriftserver.HiveThriftServer2 1